prefix
stringlengths 82
32.6k
| middle
stringlengths 5
470
| suffix
stringlengths 0
81.2k
| file_path
stringlengths 6
168
| repo_name
stringlengths 16
77
| context
listlengths 5
5
| lang
stringclasses 4
values | ground_truth
stringlengths 5
470
|
---|---|---|---|---|---|---|---|
import { type IResult, type Request } from 'mssql';
import type { StoredProcedureParameter, StoredProcedureSchema, ILogger } from '../types';
import { type DatabaseExecutor } from '../executor';
import { convertSqlValueToJsValue } from '../utils';
/**
* A manager for stored procedure metadata.
* Handles the retrieval and caching of stored procedure metadata.
*/
export class StoredProcedureMetadataManager {
/**
* Regular expression to extract MSSQL stored procedure names.
* See https://regex101.com/r/cMsTyT/1 for this regex.
*/
private static readonly storedProcedureNameRegex =
/((?:(?:\[([\w\s]+)\]|(\w+))\.)?(?:\[([\w\s]+)\]|(\w+))\.(?:\[([\w\s]+)\]|(\w+)))/i;
/**
* Matches any comments from the Stored Procedure definition.
* See https://regex101.com/r/dxA7n0/1 for this regex.
*/
private static readonly commentRegex = /(?:\s*-{2}.+\s*$)|(?:\/\*([\s\S]*?)\*\/)/gm;
/**
* Matches the parameters from the Stored Procedure definition.
* See https://regex101.com/r/4TaTky/1 for this regex.
*/
private static readonly parameterSectionRegex =
/(?<=(?:CREATE|ALTER)\s+PROCEDURE)\s+((?:(?:\[([\w\s]+)\]|(\w+))\.)?(?:\[([\w\s]+)\]|(\w+))\.(?:\[([\w\s]+)\]|(\w+)))(.*?)(?=(?:AS|FOR\s+REPLICATION)[^\w])/is;
/**
* See https://regex101.com/r/iMEaLb/1 for this regex.
* Match the individual parameters in the Parameter Definition.
*/
private static readonly parameterDefinitionRegex = /(@[\w]+)\s+([^\s]+)\s*=\s*([^, ]*),?/gi;
constructor(private readonly _databaseExecutor: DatabaseExecutor) {}
/**
* Parses the stored procedure parameter schema into a StoredProcedureParameter array.
* @param {string} storedProcedureName - The name of the stored procedure to retrieve the parameter schema for.
* @returns A Promise that resolves to the result of the stored procedure execution.
*/
public async getStoredProcedureParameterSchema(
storedProcedureName: string,
logger | : ILogger,
): Promise<IResult<StoredProcedureSchema>> { |
return await this._databaseExecutor.executeQueryRequest(async (request: Request) => {
// Remove square bracket notation if any, and split into schema and name.
const schemaAndName = storedProcedureName.replace(/\[|\]/g, '').split('.');
const result = await request.query<StoredProcedureSchema>(
'SELECT ' +
'PARAMETER_NAME as name, ' +
'DATA_TYPE as type, ' +
'PARAMETER_MODE as mode, ' +
'CHARACTER_MAXIMUM_LENGTH length, ' +
'NUMERIC_PRECISION as precision, ' +
'NUMERIC_SCALE as scale ' +
'FROM INFORMATION_SCHEMA.PARAMETERS ' +
`WHERE SPECIFIC_SCHEMA = '${schemaAndName[0]}' AND SPECIFIC_NAME = '${schemaAndName[1]}';
SELECT OBJECT_DEFINITION(OBJECT_ID('${storedProcedureName}')) AS storedProcedureDefinition;`,
);
const recordSetLength = result.recordsets.length as number;
if (recordSetLength < 1 || recordSetLength > 2) {
throw new Error(
`Could not retrieve stored procedure parameter schema from Database for stored procedure ${storedProcedureName}.`,
);
}
if (recordSetLength !== 2 || result.recordsets[1].length !== 1) {
throw new Error(
`Could not retrieve stored procedure definition from Database for stored procedure ${storedProcedureName}.`,
);
}
return result;
}, logger);
}
/**
* Parses the stored procedure parameter schema into a StoredProcedureParameter array.
* @param {string} storedProcedureName - The name of the stored procedure to parse the parameter schema for.
* @param {IResult<StoredProcedureSchema>} schemaResult - The result of the stored procedure parameter schema query.
* @returns A StoredProcedureParameter array.
*/
public parseStoredProcedureParameters(
storedProcedureName: string,
schemaResult: IResult<StoredProcedureSchema>,
): IterableIterator<StoredProcedureParameter> {
const parameterSchemaMap: Map<string, StoredProcedureParameter> =
schemaResult.recordsets[0].reduce(
(parameterMap: Map<string, StoredProcedureParameter>, item: StoredProcedureParameter) => {
parameterMap.set(item.name, item);
return parameterMap;
},
new Map<string, StoredProcedureParameter>(),
);
const storedProcedureDefinition = schemaResult.recordsets[1][0].storedProcedureDefinition;
if (storedProcedureDefinition == null) {
throw new Error(
`Could not parse stored procedure definition for stored procedure ${storedProcedureName}.`,
);
}
const commentStrippedStoredProcedureDefinition = storedProcedureDefinition.replace(
StoredProcedureMetadataManager.commentRegex,
'',
);
if (commentStrippedStoredProcedureDefinition === '') {
throw new Error(
`Could not parse stored procedure comments from definition for stored procedure ${storedProcedureName}.`,
);
}
const parameterSection = commentStrippedStoredProcedureDefinition.match(
StoredProcedureMetadataManager.parameterSectionRegex,
);
if (parameterSection === null || parameterSection.length !== 9) {
throw new Error(
`Could not parse stored procedure parameters from definition for stored procedure ${storedProcedureName}.`,
);
}
const parameterDefinition = parameterSection[8];
let parameterDefinitionMatch;
while (
(parameterDefinitionMatch =
StoredProcedureMetadataManager.parameterDefinitionRegex.exec(parameterDefinition)) !== null
) {
const name = parameterDefinitionMatch[1];
const type = parameterDefinitionMatch[2];
const defaultValue = parameterDefinitionMatch[3];
const parameter = parameterSchemaMap.get(name);
if (parameter !== undefined) {
parameter.defaultValue = convertSqlValueToJsValue(defaultValue, type);
}
}
return parameterSchemaMap.values();
}
}
| src/lib/stored-procedure/stored-procedure-metadata-manager.ts | Falven-mssql-data-source-bca6621 | [
{
"filename": "src/lib/stored-procedure/stored-procedure-manager.ts",
"retrieved_chunk": " * procedure results to the correct schema field names.\n * @param {ILogger} logger - The logger to use for logging.\n * @returns A Promise that resolves to the result of the stored procedure execution.\n */\n public async executeStoredProcedure<T>(\n storedProcedureName: string,\n input: InputParameters,\n request: Request,\n logger: ILogger,\n info?: GraphQLResolveInfo,",
"score": 50.70530299972888
},
{
"filename": "src/lib/datasource/mssql-datasource.ts",
"retrieved_chunk": " * @template T - This type parameter represents the type of the value returned by the resolver procedure.\n * @param {string} storedProcedureName - The name of the stored procedure to execute.\n * @param {StoredProcedureInput} input - The input parameters for the stored procedure.\n * @param {GraphQLResolveInfo | undefined} info - If provided, will be used to case-insensitively map the stored\n * procedure results to the correct schema field names.\n * @returns A Promise that resolves to the result of the stored procedure execution.\n */\n public async executeStoredProcedureQuery<T>(\n storedProcedureName: string,\n input: InputParameters,",
"score": 49.75778960545752
},
{
"filename": "src/lib/datasource/mssql-datasource.ts",
"retrieved_chunk": " /**\n * Executes a stored procedure for a Mutation operation with the provided input parameters, and returns the result.\n * @template T - This type parameter represents the type of the value returned by the resolver procedure.\n * @param {string} storedProcedureName - The name of the stored procedure to execute.\n * @param {StoredProcedureInput} input - The input parameters for the stored procedure.\n * @param {GraphQLResolveInfo | undefined} info - If provided, will be used to case-insensitively map the stored\n * procedure results to the correct schema field names.\n * @returns A Promise that resolves to the result of the stored procedure execution.\n */\n public async executeStoredProcedureMutation<T>(",
"score": 47.577799077097396
},
{
"filename": "src/lib/types/stored-procedure-schema.ts",
"retrieved_chunk": "import type { StoredProcedureParameter } from '.';\n/**\n * Represents the result of a stored procedure execution.\n */\nexport type StoredProcedureSchema = [\n StoredProcedureParameter,\n {\n storedProcedureDefinition: string;\n },\n];",
"score": 33.244649470173364
},
{
"filename": "src/lib/stored-procedure/stored-procedure-manager.ts",
"retrieved_chunk": " result[mappedKey] = obj[key];\n }\n return result as T;\n }\n /**\n * Prepares the stored procedure result into a GraphQL result object.\n * @param {IProcedureResult} result - The stored procedure result.\n * @param {GraphQLResolveInfo | undefined} info - If provided, will be used to case-insensitively map the stored\n * procedure results to the correct schema field names.\n * @returns {IResolverProcedureResult} A prepared GraphQL result object.",
"score": 31.90177833433102
}
] | typescript | : ILogger,
): Promise<IResult<StoredProcedureSchema>> { |
import { type IResult, type Request } from 'mssql';
import type { StoredProcedureParameter, StoredProcedureSchema, ILogger } from '../types';
import { type DatabaseExecutor } from '../executor';
import { convertSqlValueToJsValue } from '../utils';
/**
* A manager for stored procedure metadata.
* Handles the retrieval and caching of stored procedure metadata.
*/
export class StoredProcedureMetadataManager {
/**
* Regular expression to extract MSSQL stored procedure names.
* See https://regex101.com/r/cMsTyT/1 for this regex.
*/
private static readonly storedProcedureNameRegex =
/((?:(?:\[([\w\s]+)\]|(\w+))\.)?(?:\[([\w\s]+)\]|(\w+))\.(?:\[([\w\s]+)\]|(\w+)))/i;
/**
* Matches any comments from the Stored Procedure definition.
* See https://regex101.com/r/dxA7n0/1 for this regex.
*/
private static readonly commentRegex = /(?:\s*-{2}.+\s*$)|(?:\/\*([\s\S]*?)\*\/)/gm;
/**
* Matches the parameters from the Stored Procedure definition.
* See https://regex101.com/r/4TaTky/1 for this regex.
*/
private static readonly parameterSectionRegex =
/(?<=(?:CREATE|ALTER)\s+PROCEDURE)\s+((?:(?:\[([\w\s]+)\]|(\w+))\.)?(?:\[([\w\s]+)\]|(\w+))\.(?:\[([\w\s]+)\]|(\w+)))(.*?)(?=(?:AS|FOR\s+REPLICATION)[^\w])/is;
/**
* See https://regex101.com/r/iMEaLb/1 for this regex.
* Match the individual parameters in the Parameter Definition.
*/
private static readonly parameterDefinitionRegex = /(@[\w]+)\s+([^\s]+)\s*=\s*([^, ]*),?/gi;
constructor(private readonly _databaseExecutor: DatabaseExecutor) {}
/**
* Parses the stored procedure parameter schema into a StoredProcedureParameter array.
* @param {string} storedProcedureName - The name of the stored procedure to retrieve the parameter schema for.
* @returns A Promise that resolves to the result of the stored procedure execution.
*/
public async getStoredProcedureParameterSchema(
storedProcedureName: string,
logger: ILogger,
): Promise<IResult<StoredProcedureSchema>> {
return await this._databaseExecutor.executeQueryRequest(async (request: Request) => {
// Remove square bracket notation if any, and split into schema and name.
const schemaAndName = storedProcedureName.replace(/\[|\]/g, '').split('.');
const result = await request.query<StoredProcedureSchema>(
'SELECT ' +
'PARAMETER_NAME as name, ' +
'DATA_TYPE as type, ' +
'PARAMETER_MODE as mode, ' +
'CHARACTER_MAXIMUM_LENGTH length, ' +
'NUMERIC_PRECISION as precision, ' +
'NUMERIC_SCALE as scale ' +
'FROM INFORMATION_SCHEMA.PARAMETERS ' +
`WHERE SPECIFIC_SCHEMA = '${schemaAndName[0]}' AND SPECIFIC_NAME = '${schemaAndName[1]}';
SELECT OBJECT_DEFINITION(OBJECT_ID('${storedProcedureName}')) AS storedProcedureDefinition;`,
);
const recordSetLength = result.recordsets.length as number;
if (recordSetLength < 1 || recordSetLength > 2) {
throw new Error(
`Could not retrieve stored procedure parameter schema from Database for stored procedure ${storedProcedureName}.`,
);
}
if (recordSetLength !== 2 || result.recordsets[1].length !== 1) {
throw new Error(
`Could not retrieve stored procedure definition from Database for stored procedure ${storedProcedureName}.`,
);
}
return result;
}, logger);
}
/**
* Parses the stored procedure parameter schema into a StoredProcedureParameter array.
* @param {string} storedProcedureName - The name of the stored procedure to parse the parameter schema for.
* @param {IResult<StoredProcedureSchema>} schemaResult - The result of the stored procedure parameter schema query.
* @returns A StoredProcedureParameter array.
*/
public parseStoredProcedureParameters(
storedProcedureName: string,
schemaResult: IResult<StoredProcedureSchema>,
): | IterableIterator<StoredProcedureParameter> { |
const parameterSchemaMap: Map<string, StoredProcedureParameter> =
schemaResult.recordsets[0].reduce(
(parameterMap: Map<string, StoredProcedureParameter>, item: StoredProcedureParameter) => {
parameterMap.set(item.name, item);
return parameterMap;
},
new Map<string, StoredProcedureParameter>(),
);
const storedProcedureDefinition = schemaResult.recordsets[1][0].storedProcedureDefinition;
if (storedProcedureDefinition == null) {
throw new Error(
`Could not parse stored procedure definition for stored procedure ${storedProcedureName}.`,
);
}
const commentStrippedStoredProcedureDefinition = storedProcedureDefinition.replace(
StoredProcedureMetadataManager.commentRegex,
'',
);
if (commentStrippedStoredProcedureDefinition === '') {
throw new Error(
`Could not parse stored procedure comments from definition for stored procedure ${storedProcedureName}.`,
);
}
const parameterSection = commentStrippedStoredProcedureDefinition.match(
StoredProcedureMetadataManager.parameterSectionRegex,
);
if (parameterSection === null || parameterSection.length !== 9) {
throw new Error(
`Could not parse stored procedure parameters from definition for stored procedure ${storedProcedureName}.`,
);
}
const parameterDefinition = parameterSection[8];
let parameterDefinitionMatch;
while (
(parameterDefinitionMatch =
StoredProcedureMetadataManager.parameterDefinitionRegex.exec(parameterDefinition)) !== null
) {
const name = parameterDefinitionMatch[1];
const type = parameterDefinitionMatch[2];
const defaultValue = parameterDefinitionMatch[3];
const parameter = parameterSchemaMap.get(name);
if (parameter !== undefined) {
parameter.defaultValue = convertSqlValueToJsValue(defaultValue, type);
}
}
return parameterSchemaMap.values();
}
}
| src/lib/stored-procedure/stored-procedure-metadata-manager.ts | Falven-mssql-data-source-bca6621 | [
{
"filename": "src/lib/datasource/mssql-datasource.ts",
"retrieved_chunk": " * @template T - This type parameter represents the type of the value returned by the resolver procedure.\n * @param {string} storedProcedureName - The name of the stored procedure to execute.\n * @param {StoredProcedureInput} input - The input parameters for the stored procedure.\n * @param {GraphQLResolveInfo | undefined} info - If provided, will be used to case-insensitively map the stored\n * procedure results to the correct schema field names.\n * @returns A Promise that resolves to the result of the stored procedure execution.\n */\n public async executeStoredProcedureQuery<T>(\n storedProcedureName: string,\n input: InputParameters,",
"score": 45.06240913757887
},
{
"filename": "src/lib/datasource/mssql-datasource.ts",
"retrieved_chunk": " /**\n * Executes a stored procedure for a Mutation operation with the provided input parameters, and returns the result.\n * @template T - This type parameter represents the type of the value returned by the resolver procedure.\n * @param {string} storedProcedureName - The name of the stored procedure to execute.\n * @param {StoredProcedureInput} input - The input parameters for the stored procedure.\n * @param {GraphQLResolveInfo | undefined} info - If provided, will be used to case-insensitively map the stored\n * procedure results to the correct schema field names.\n * @returns A Promise that resolves to the result of the stored procedure execution.\n */\n public async executeStoredProcedureMutation<T>(",
"score": 43.32412003288601
},
{
"filename": "src/lib/types/stored-procedure-schema.ts",
"retrieved_chunk": "import type { StoredProcedureParameter } from '.';\n/**\n * Represents the result of a stored procedure execution.\n */\nexport type StoredProcedureSchema = [\n StoredProcedureParameter,\n {\n storedProcedureDefinition: string;\n },\n];",
"score": 43.1599239338179
},
{
"filename": "src/lib/stored-procedure/stored-procedure-manager.ts",
"retrieved_chunk": " }\n /**\n * Prepares the stored procedure request.\n * @param {IterableIterator<StoredProcedureParameter>} storedProcedureParameters - The stored procedure parameters.\n * @param {StoredProcedureInput} input - The input object.\n * @param {Request} request - The request object.\n * @returns A prepared request object.\n */\n private prepareStoredProcedureRequest(\n storedProcedureParameters: IterableIterator<StoredProcedureParameter>,",
"score": 41.37071187917653
},
{
"filename": "src/lib/stored-procedure/stored-procedure-manager.ts",
"retrieved_chunk": " * procedure results to the correct schema field names.\n * @param {ILogger} logger - The logger to use for logging.\n * @returns A Promise that resolves to the result of the stored procedure execution.\n */\n public async executeStoredProcedure<T>(\n storedProcedureName: string,\n input: InputParameters,\n request: Request,\n logger: ILogger,\n info?: GraphQLResolveInfo,",
"score": 37.34776673845302
}
] | typescript | IterableIterator<StoredProcedureParameter> { |
import {
type ISqlTypeFactory,
type ISqlTypeFactoryWithLength,
type ISqlTypeFactoryWithNoParams,
type ISqlTypeFactoryWithPrecisionScale,
type ISqlTypeFactoryWithScale,
type ISqlTypeFactoryWithTvpType,
type ISqlTypeWithLength,
type ISqlTypeWithNoParams,
type ISqlTypeWithPrecisionScale,
type ISqlTypeWithScale,
type ISqlTypeWithTvpType,
TYPES,
MAX,
} from 'mssql';
import type { StoredProcedureParameter } from '../types';
type TypeFactory<T> = T extends ISqlTypeFactoryWithNoParams
? () => ISqlTypeWithNoParams
: T extends ISqlTypeFactoryWithLength
? (length?: number) => ISqlTypeWithLength
: T extends ISqlTypeFactoryWithScale
? (scale?: number) => ISqlTypeWithScale
: T extends ISqlTypeFactoryWithPrecisionScale
? (precision?: number, scale?: number) => ISqlTypeWithPrecisionScale
: T extends ISqlTypeFactoryWithTvpType
? (tvpType?: unknown) => ISqlTypeWithTvpType
: never;
type TypesType = typeof TYPES;
type TypesKey = keyof TypesType;
type IndexableTypes = {
[K in TypesKey]: TypeFactory<TypesType[K]>;
};
function isSqlTypeFactoryWithNoParams(
factoryObject: ISqlTypeFactory,
): factoryObject is ISqlTypeFactoryWithNoParams {
return (
factoryObject !== undefined &&
!('length' in factoryObject) &&
!('scale' in factoryObject) &&
!('precision' in factoryObject) &&
!('tvpType' in factoryObject)
);
}
function isSqlTypeFactoryWithLength(
factoryObject: ISqlTypeFactory,
): factoryObject is ISqlTypeFactoryWithLength {
return factoryObject !== undefined && 'length' in factoryObject;
}
function isSqlTypeFactoryWithScale(
factoryObject: ISqlTypeFactory,
): factoryObject is ISqlTypeFactoryWithScale {
return factoryObject !== undefined && 'scale' in factoryObject;
}
function isSqlTypeFactoryWithPrecisionScale(
factoryObject: ISqlTypeFactory,
): factoryObject is ISqlTypeFactoryWithPrecisionScale {
return factoryObject !== undefined && 'precision' in factoryObject && 'scale' in factoryObject;
}
function isSqlTypeFactoryWithTvpType(
factoryObject: ISqlTypeFactory,
): factoryObject is ISqlTypeFactoryWithTvpType {
return factoryObject !== undefined && 'tvpType' in factoryObject;
}
const findPropertyCaseInsensitive = (obj: object, propertyName: string): string | null => {
const lowercasePropertyName = propertyName.toLowerCase();
for (const key in obj) {
if (
Object.prototype.hasOwnProperty.call(obj, key) &&
key.toLowerCase() === lowercasePropertyName
) {
return key;
}
}
return null;
};
export const mapDbTypeToDriverType = ({
type,
length,
precision,
scale,
} | : Pick<StoredProcedureParameter, 'type' | 'length' | 'precision' | 'scale'>): ISqlTypeFactory => { |
const types: IndexableTypes = TYPES;
const property = findPropertyCaseInsensitive(types, type);
if (property !== null) {
const typeFactory = types[property as TypesKey];
if (isSqlTypeFactoryWithNoParams(typeFactory)) {
return typeFactory();
} else if (isSqlTypeFactoryWithLength(typeFactory)) {
return (typeFactory as ISqlTypeFactoryWithLength)(length === -1 ? MAX : length);
} else if (isSqlTypeFactoryWithScale(typeFactory)) {
return (typeFactory as ISqlTypeFactoryWithScale)(scale);
} else if (isSqlTypeFactoryWithPrecisionScale(typeFactory)) {
return (typeFactory as ISqlTypeFactoryWithPrecisionScale)(precision, scale);
} else if (isSqlTypeFactoryWithTvpType(typeFactory)) {
return TYPES.NVarChar();
} else {
throw new Error(`Unknown SQL Type ${type}.`);
}
}
return TYPES.NVarChar();
};
type SqlValue = string | number | boolean | Date | Buffer;
const isStringOrNumber = (value: SqlValue): value is string | number => {
return typeof value === 'string' || typeof value === 'number';
};
const isDate = (value: SqlValue): value is Date => {
return value instanceof Date;
};
const isType = (sqlType: string, typePrefixes: string[]): boolean => {
return typePrefixes.some((prefix) => sqlType.startsWith(prefix));
};
export const convertSqlValueToJsValue = (value: SqlValue, sqlType: string): unknown => {
if (value === 'NULL') {
return null;
}
const lowerCaseSqlType = sqlType.toLowerCase();
if (
isType(lowerCaseSqlType, [
'varchar',
'nvarchar',
'char',
'nchar',
'text',
'ntext',
'xml',
'uniqueidentifier',
])
) {
return String(value);
}
if (
isType(lowerCaseSqlType, [
'int',
'smallint',
'tinyint',
'bigint',
'decimal',
'numeric',
'float',
'real',
'money',
'smallmoney',
])
) {
return Number(value);
}
if (isType(lowerCaseSqlType, ['bit'])) {
return Boolean(value);
}
if (isType(lowerCaseSqlType, ['date', 'datetime', 'datetime2', 'smalldatetime', 'time'])) {
if (isStringOrNumber(value) || isDate(value)) {
return new Date(value);
}
throw new Error('Cannot create a Date from a boolean value.');
}
if (isType(lowerCaseSqlType, ['binary', 'varbinary', 'image'])) {
return Buffer.from(value as Buffer);
}
if (isType(lowerCaseSqlType, ['rowversion', 'timestamp'])) {
return Buffer.from(value as Buffer);
}
if (isType(lowerCaseSqlType, ['hierarchyid', 'geometry', 'geography'])) {
return value;
}
if (isType(lowerCaseSqlType, ['tvp'])) {
throw new Error('TVPs are not supported.');
}
if (isType(lowerCaseSqlType, ['udt'])) {
throw new Error('UDTs are not supported.');
}
throw new Error(`Unsupported SQL type: ${sqlType}`);
};
| src/lib/utils/type-map.ts | Falven-mssql-data-source-bca6621 | [
{
"filename": "src/lib/types/i-stored-procedure-parameter.ts",
"retrieved_chunk": " */\nexport interface StoredProcedureParameter {\n name: string;\n type: string;\n mode: ParameterMode;\n defaultValue?: unknown;\n length?: number;\n precision?: number;\n scale?: number;\n}",
"score": 29.807992864678372
},
{
"filename": "src/lib/stored-procedure/stored-procedure-manager.ts",
"retrieved_chunk": " for (const spParameter of storedProcedureParameters) {\n const { name, type, length, precision, scale, ...rest } = spParameter;\n const parameterName = name.slice(1);\n // Let's use the parameter name in lowercase as the lookup key.\n preparedParameters.set(parameterName.toLowerCase(), {\n name: parameterName,\n type: mapDbTypeToDriverType({\n type,\n length,\n precision,",
"score": 29.61757251133219
},
{
"filename": "src/lib/types/i-stored-procedure-parameter.ts",
"retrieved_chunk": "import type { ParameterMode } from '.';\n/**\n * Represents a subset of used metadata for an MSSQL stored procedure parameter.\n * @property {string} name - The name of the parameter. Begins with @.\n * @property {string} type - The MSSQL data type of the parameter.\n * @property {ParameterMode} mode - The MSSQL mode of the parameter. Either 'IN', 'INOUT' or 'UNKNOWN'.\n * @property {unknown} defaultValue - The default value of the parameter, if any, or undefined.\n * @property {number} length - The length of character-based parameters, or undefined.\n * @property {number} precision - The precision of floating point parameters, or undefined.\n * @property {number} scale - The scale of floating point parameters, or undefined.",
"score": 18.80691710730278
},
{
"filename": "src/lib/stored-procedure/stored-procedure-metadata-manager.ts",
"retrieved_chunk": " 'NUMERIC_PRECISION as precision, ' +\n 'NUMERIC_SCALE as scale ' +\n 'FROM INFORMATION_SCHEMA.PARAMETERS ' +\n `WHERE SPECIFIC_SCHEMA = '${schemaAndName[0]}' AND SPECIFIC_NAME = '${schemaAndName[1]}';\n SELECT OBJECT_DEFINITION(OBJECT_ID('${storedProcedureName}')) AS storedProcedureDefinition;`,\n );\n const recordSetLength = result.recordsets.length as number;\n if (recordSetLength < 1 || recordSetLength > 2) {\n throw new Error(\n `Could not retrieve stored procedure parameter schema from Database for stored procedure ${storedProcedureName}.`,",
"score": 15.747463752793871
},
{
"filename": "src/lib/stored-procedure/stored-procedure-metadata-manager.ts",
"retrieved_chunk": " const parameterSchemaMap: Map<string, StoredProcedureParameter> =\n schemaResult.recordsets[0].reduce(\n (parameterMap: Map<string, StoredProcedureParameter>, item: StoredProcedureParameter) => {\n parameterMap.set(item.name, item);\n return parameterMap;\n },\n new Map<string, StoredProcedureParameter>(),\n );\n const storedProcedureDefinition = schemaResult.recordsets[1][0].storedProcedureDefinition;\n if (storedProcedureDefinition == null) {",
"score": 9.529199215726504
}
] | typescript | : Pick<StoredProcedureParameter, 'type' | 'length' | 'precision' | 'scale'>): ISqlTypeFactory => { |
import { camelCase } from 'lodash';
import { type Request, type IProcedureResult, type IResult, type IRecordSet } from 'mssql';
import { type GraphQLResolveInfo } from 'graphql';
import {
type DriverType,
type PreparedStoredProcedureParameter,
ParameterMode,
type StoredProcedureSchema,
type StoredProcedureParameter,
type ILogger,
type InputParameters,
} from '../types';
import { mapDbTypeToDriverType, replacer } from '../utils';
import { logExecutionBegin, logPerformance, logSafely } from '../logging';
import {
type StoredProcedureCacheManager,
type StoredProcedureMetadataManager,
} from '../stored-procedure';
import { type IResolverProcedureResult } from '../types/i-resolver-procedure-result';
import { getNodeSelectionSetNames, getFieldNamesExcludingNode } from '../utils/graphql-helper';
/**
* StoredProcedureManager provides methods to interact
* with a Microsoft SQL Server database for managing stored procedures.
*/
export class StoredProcedureManager {
/**
* Creates a new instance of StoredProcedureManager.
*/
constructor(
private readonly _storedProcedureCacheManager: StoredProcedureCacheManager,
private readonly _storedProcedureMetadataManager: StoredProcedureMetadataManager,
) {}
/**
* Executes a stored procedure with the provided input parameters, and returns the result.
* @template TVal - The type of records in the result set.
* @template TRet - The type of the result object to be returned.
* @param {string} storedProcedureName - The name of the stored procedure to execute.
* @param {StoredProcedureInput} input - The input parameters for the stored procedure.
* @param {Request} request - The request to execute the stored procedure.
* @param {GraphQLResolveInfo | undefined} info - If provided, will be used to case-insensitively map the stored
* procedure results to the correct schema field names.
* @param {ILogger} logger - The logger to use for logging.
* @returns A Promise that resolves to the result of the stored procedure execution.
*/
public async executeStoredProcedure<T>(
storedProcedureName: string,
input: InputParameters,
request: Request,
logger: ILogger,
info?: GraphQLResolveInfo,
): Promise<IResolverProcedureResult<T>> {
let startTime = performance.now();
let schema = (await this._storedProcedureCacheManager.tryGetFromCache(storedProcedureName)) as
| IResult<StoredProcedureSchema>
| undefined;
if (schema === undefined) {
logSafely(
logger,
'info',
// Yellow
`\x1b[33mCache miss occurred while retrieving the cached schema for ${storedProcedureName}\x1b[0m`,
);
schema = await this._storedProcedureMetadataManager.getStoredProcedureParameterSchema(
storedProcedureName,
logger,
);
await this._storedProcedureCacheManager.addToCache(storedProcedureName, schema);
} else {
logSafely(
logger,
'info',
// Green
`\x1b[32mCache hit occurred while retrieving the cached schema for ${storedProcedureName}\x1b[0m`,
);
}
logPerformance(logger, 'getStoredProcedureParameterSchema', startTime);
startTime = performance.now();
const storedProcedureParameters =
this._storedProcedureMetadataManager.parseStoredProcedureParameters(
storedProcedureName,
schema,
);
logPerformance(logger, 'parseStoredProcedureParameters', startTime);
startTime = performance.now();
const preparedRequest = this.prepareStoredProcedureRequest(
storedProcedureParameters,
input,
request,
);
logPerformance(logger, 'prepareStoredProcedureRequest', startTime);
startTime = performance.now();
logExecutionBegin(
logger,
`Stored Procedure ${storedProcedureName} with parameters`,
preparedRequest.parameters,
// Green
'32m',
);
const result = await preparedRequest.execute(storedProcedureName);
startTime = performance.now();
const preparedResult = this.prepareStoredProcedureResult(result, info);
logPerformance(logger, 'prepareStoredProcedureResult', startTime);
return preparedResult;
}
private prepareParameters(
storedProcedureParameters: IterableIterator<StoredProcedureParameter>,
input: InputParameters,
): Map<string, PreparedStoredProcedureParameter> {
// We want to use the inferred DB Stored Procedure schema as the source of truth.
const preparedParameters = new Map<string, PreparedStoredProcedureParameter>();
for (const spParameter of storedProcedureParameters) {
const { name, type, length, precision, scale, ...rest } = spParameter;
const parameterName = name.slice(1);
// Let's use the parameter name in lowercase as the lookup key.
preparedParameters.set(parameterName.toLowerCase(), {
name: parameterName,
type | : mapDbTypeToDriverType({ |
type,
length,
precision,
scale,
}) as DriverType,
value: undefined,
...rest,
});
}
// Populate our input values into the request parameters.
const inputParameters = input as Record<string, unknown>;
for (const inputParameterKey in inputParameters) {
const preparedParameter = preparedParameters.get(inputParameterKey.toLowerCase());
if (preparedParameter != null) {
preparedParameter.value = inputParameters[inputParameterKey];
}
// We don't care about provided input parameters that are missing in the Stored Procedure definition.
}
return preparedParameters;
}
private getMissingRequiredParameters(
parameters: Map<string, PreparedStoredProcedureParameter>,
): PreparedStoredProcedureParameter[] {
// Check what required parameters are missing.
const missingRequiredParameters = [];
for (const parameter of parameters.values()) {
// If they have a default value they can be ommitted from the request.
if (parameter.defaultValue === undefined && parameter.value === undefined) {
missingRequiredParameters.push(parameter);
}
}
return missingRequiredParameters;
}
private addParametersToRequest(
parameters: Map<string, PreparedStoredProcedureParameter>,
request: Request,
): Request {
const preparedRequest = request;
for (const parameter of parameters.values()) {
const { name, type, mode, value, defaultValue } = parameter;
if (defaultValue !== undefined && value === undefined) {
continue;
}
const modeEnum = mode;
if (modeEnum === ParameterMode.IN) {
preparedRequest.input(name, type, value);
} else if (modeEnum === ParameterMode.INOUT) {
preparedRequest.output(name, type, value);
} else {
throw new Error(`Unknown parameter mode: ${mode}`);
}
}
return preparedRequest;
}
/**
 * Prepares the stored procedure request: merges the caller-provided input
 * into the stored procedure's parameter schema, verifies all required
 * parameters are present, then binds everything onto the mssql request.
 * @param {IterableIterator<StoredProcedureParameter>} storedProcedureParameters - The stored procedure parameters.
 * @param {InputParameters} input - The input object.
 * @param {Request} request - The request object.
 * @returns A prepared request object.
 * @throws {Error} When one or more required parameters were not provided.
 */
private prepareStoredProcedureRequest(
  storedProcedureParameters: IterableIterator<StoredProcedureParameter>,
  input: InputParameters,
  request: Request,
): Request {
  const parameters = this.prepareParameters(storedProcedureParameters, input);

  const missing = this.getMissingRequiredParameters(parameters);
  if (missing.length > 0) {
    const described = missing.map((param) => JSON.stringify(param, replacer, 0)).join(', ');
    throw new Error(`Missing ${missing.length} required parameters: ${described}.`);
  }

  return this.addParametersToRequest(parameters, request);
}
/**
 * Maps the keys of an object based on the provided mapping.
 * Keys without an explicit mapping entry fall back to their camelCase form.
 * @template T - The type of the original object.
 * @param {T} obj - The object whose keys need to be mapped.
 * @param {Record<string, string>} mapping - Dictionary of lowercased original keys to new keys.
 * @returns {T} A new object with the keys mapped according to the provided mapping.
 */
private mapKeysWithMapping<T extends Record<string, unknown>>(
  obj: T,
  mapping: Record<string, string>,
): T {
  const mapped: Record<string, unknown> = {};
  for (const originalKey of Object.keys(obj)) {
    // Mapping lookup is case-insensitive; camelCase is the fallback convention.
    const targetKey = mapping[originalKey.toLowerCase()] ?? camelCase(originalKey);
    mapped[targetKey] = obj[originalKey];
  }
  return mapped as T;
}
/**
 * Prepares the stored procedure result into a GraphQL result object.
 * @param {IProcedureResult} result - The stored procedure result.
 * @param {GraphQLResolveInfo | undefined} info - If provided, will be used to case-insensitively map the stored
 * procedure results to the correct schema field names.
 * @returns {IResolverProcedureResult} A prepared GraphQL result object.
 */
private prepareStoredProcedureResult<T extends Record<string, unknown>>(
  result: IProcedureResult<T>,
  info?: GraphQLResolveInfo,
): IResolverProcedureResult<T> {
  // Without resolver info we cannot derive schema field names, so fall back
  // to empty mappings (keys are then simply camelCased).
  let resultSetFields: Record<string, string> = {};
  let outputFields: Record<string, string> = {};
  if (info !== undefined) {
    resultSetFields = getNodeSelectionSetNames(info, 'resultSets');
    outputFields = getFieldNamesExcludingNode(info, 'resultSets');
  }

  const resultSets = result.recordsets.map((recordset: IRecordSet<Record<string, unknown>>) =>
    recordset.map((record) => this.mapKeysWithMapping(record, resultSetFields)),
  );
  const output = this.mapKeysWithMapping(result.output, outputFields);

  return {
    returnValue: result.returnValue,
    resultSets: resultSets as T[][],
    rowsAffected: result.rowsAffected,
    ...output,
  };
}
}
| src/lib/stored-procedure/stored-procedure-manager.ts | Falven-mssql-data-source-bca6621 | [
{
"filename": "src/lib/utils/type-map.ts",
"retrieved_chunk": " return key;\n }\n }\n return null;\n};\nexport const mapDbTypeToDriverType = ({\n type,\n length,\n precision,\n scale,",
"score": 28.17205688642332
},
{
"filename": "src/lib/stored-procedure/stored-procedure-metadata-manager.ts",
"retrieved_chunk": " const parameterSchemaMap: Map<string, StoredProcedureParameter> =\n schemaResult.recordsets[0].reduce(\n (parameterMap: Map<string, StoredProcedureParameter>, item: StoredProcedureParameter) => {\n parameterMap.set(item.name, item);\n return parameterMap;\n },\n new Map<string, StoredProcedureParameter>(),\n );\n const storedProcedureDefinition = schemaResult.recordsets[1][0].storedProcedureDefinition;\n if (storedProcedureDefinition == null) {",
"score": 27.367621523103665
},
{
"filename": "src/lib/stored-procedure/stored-procedure-metadata-manager.ts",
"retrieved_chunk": " ): Promise<IResult<StoredProcedureSchema>> {\n return await this._databaseExecutor.executeQueryRequest(async (request: Request) => {\n // Remove square bracket notation if any, and split into schema and name.\n const schemaAndName = storedProcedureName.replace(/\\[|\\]/g, '').split('.');\n const result = await request.query<StoredProcedureSchema>(\n 'SELECT ' +\n 'PARAMETER_NAME as name, ' +\n 'DATA_TYPE as type, ' +\n 'PARAMETER_MODE as mode, ' +\n 'CHARACTER_MAXIMUM_LENGTH length, ' +",
"score": 27.14532416638235
},
{
"filename": "src/lib/stored-procedure/stored-procedure-metadata-manager.ts",
"retrieved_chunk": " 'NUMERIC_PRECISION as precision, ' +\n 'NUMERIC_SCALE as scale ' +\n 'FROM INFORMATION_SCHEMA.PARAMETERS ' +\n `WHERE SPECIFIC_SCHEMA = '${schemaAndName[0]}' AND SPECIFIC_NAME = '${schemaAndName[1]}';\n SELECT OBJECT_DEFINITION(OBJECT_ID('${storedProcedureName}')) AS storedProcedureDefinition;`,\n );\n const recordSetLength = result.recordsets.length as number;\n if (recordSetLength < 1 || recordSetLength > 2) {\n throw new Error(\n `Could not retrieve stored procedure parameter schema from Database for stored procedure ${storedProcedureName}.`,",
"score": 25.850877290149718
},
{
"filename": "src/lib/utils/type-map.ts",
"retrieved_chunk": "}: Pick<StoredProcedureParameter, 'type' | 'length' | 'precision' | 'scale'>): ISqlTypeFactory => {\n const types: IndexableTypes = TYPES;\n const property = findPropertyCaseInsensitive(types, type);\n if (property !== null) {\n const typeFactory = types[property as TypesKey];\n if (isSqlTypeFactoryWithNoParams(typeFactory)) {\n return typeFactory();\n } else if (isSqlTypeFactoryWithLength(typeFactory)) {\n return (typeFactory as ISqlTypeFactoryWithLength)(length === -1 ? MAX : length);\n } else if (isSqlTypeFactoryWithScale(typeFactory)) {",
"score": 24.928257106503253
}
] | typescript | : mapDbTypeToDriverType({ |
import sourceToCST, {
ConcreteAttributeNode,
ConcreteElementOpeningTagNode,
ConcreteElementSelfClosingTagNode,
ConcreteLiquidDropNode,
ConcreteNode,
ConcreteNodeTypes,
ConcreteTextNode,
} from '../1-source-to-cst';
import { UnknownConcreteNodeTypeError } from '../errors';
import ASTBuilder from './ast-builder';
/** Location and raw-source info shared by every AST node; `T` is the node's type tag. */
export type BasicNode<T> = {
  type: T;
  locStart: number;
  locEnd: number;
  source: string;
};

/** Discriminant tags for every AST node kind. */
export enum NodeTypes {
  TextNode = 'TextNode',
  LiquidDropNode = 'LiquidDropNode',
  ElementNode = 'ElementNode',
  AttributeDoubleQuoted = 'AttributeDoubleQuoted',
  AttributeSingleQuoted = 'AttributeSingleQuoted',
  AttributeUnquoted = 'AttributeUnquoted',
  AttributeEmpty = 'AttributeEmpty',
}

/** Plain text between elements and drops. */
export type TextNode = {
  value: string;
} & BasicNode<NodeTypes.TextNode>;

/** A Liquid output drop; `value` holds the drop's inner expression text. */
export type LiquidDropNode = {
  value: string;
} & BasicNode<NodeTypes.LiquidDropNode>;

/** Union of every node kind that can appear in the AST. */
export type LiquidXNode = TextNode | LiquidDropNode | ElementNode | AttributeNode;

/** A LiquidX element with its attributes and (possibly empty) children. */
export type ElementNode = {
  name: string;
  source: string;
  attributes: AttributeNode[];
  children: LiquidXNode[];
} & BasicNode<NodeTypes.ElementNode>;

/** Union of the four attribute flavors. */
export type AttributeNode =
  | AttributeDoubleQuoted
  | AttributeSingleQuoted
  | AttributeUnquoted
  | AttributeEmpty;

/** Shared shape for attributes that carry a value. */
export type AttributeNodeBase<T> = {
  name: TextNode;
  value: TextNode | LiquidDropNode;
} & BasicNode<T>;

export type AttributeDoubleQuoted = {} & AttributeNodeBase<NodeTypes.AttributeDoubleQuoted>;
export type AttributeSingleQuoted = {} & AttributeNodeBase<NodeTypes.AttributeSingleQuoted>;
export type AttributeUnquoted = {} & AttributeNodeBase<NodeTypes.AttributeUnquoted>;
/** An attribute with a name but no value (e.g. `disabled`). */
export type AttributeEmpty = { name: TextNode } & BasicNode<NodeTypes.AttributeEmpty>;
/** Converts a concrete text node into its AST counterpart. */
function toTextNode(node: ConcreteTextNode): TextNode {
  const { locStart, locEnd, source, value } = node;
  return { type: NodeTypes.TextNode, locStart, locEnd, source, value };
}
/** Converts a concrete Liquid drop node into its AST counterpart. */
function toLiquidDropNode(node: ConcreteLiquidDropNode): LiquidDropNode {
  const { locStart, locEnd, source, value } = node;
  return { type: NodeTypes.LiquidDropNode, locStart, locEnd, source, value };
}
function toElementNode(
node: ConcreteElementOpeningTagNode | ConcreteElementSelfClosingTagNode,
): ElementNode {
return {
type: NodeTypes.ElementNode,
locStart: node.locStart,
locEnd: node.locEnd,
name: node.name,
source: node.source,
attributes: toAttributes(node.attributes),
children: [],
};
}
/** Converts a list of concrete attribute nodes into AST attribute nodes. */
function toAttributes(attributes: ConcreteAttributeNode[]) {
  const attributeAST = cstToAST(attributes);
  return attributeAST as AttributeNode[];
}
/** Converts a concrete attribute value (text or Liquid drop) into its AST form. */
function toAttributeValue(value: ConcreteTextNode | ConcreteLiquidDropNode) {
  const [astValue] = cstToAST([value]);
  return astValue as TextNode | LiquidDropNode;
}
function isAttributeNode(node: any): boolean {
return (
node.type === ConcreteNodeTypes.AttributeDoubleQuoted ||
node.type === ConcreteNodeTypes.AttributeSingleQuoted ||
node.type === ConcreteNodeTypes.AttributeUnquoted ||
node.type === ConcreteNodeTypes.AttributeEmpty
);
}
function cstToAST(cst: ConcreteNode[] | ConcreteAttributeNode[]) {
if (cst.length === 0) return [];
const | astBuilder = new ASTBuilder(cst[0].source); |
for (let i = 0; i < cst.length; i += 1) {
const node = cst[i];
const prevNode = cst[i - 1];
// Add whitespaces and linebreaks that went missing after parsing. We don't need to do this
// if the node is an attribute since whitespaces between attributes is not important to preserve.
// In fact it would probably break the rendered output due to unexpected text nodes.
// TODO: This should be handled in the grammar/source-to-cst part instead (if possible).
if (prevNode?.source && !isAttributeNode(node)) {
const diff = node.locStart - prevNode.locEnd;
if (diff > 0) {
astBuilder.push(
toTextNode({
type: ConcreteNodeTypes.TextNode,
locStart: prevNode.locEnd,
locEnd: node.locStart,
source: node.source,
value: prevNode.source.slice(prevNode.locEnd, node.locStart),
}),
);
}
}
switch (node.type) {
case ConcreteNodeTypes.TextNode: {
astBuilder.push(toTextNode(node));
break;
}
case ConcreteNodeTypes.LiquidDropNode: {
astBuilder.push(toLiquidDropNode(node));
break;
}
case ConcreteNodeTypes.ElementOpeningTag: {
astBuilder.open(toElementNode(node));
break;
}
case ConcreteNodeTypes.ElementClosingTag: {
astBuilder.close(node, NodeTypes.ElementNode);
break;
}
case ConcreteNodeTypes.ElementSelfClosingTag: {
astBuilder.open(toElementNode(node));
astBuilder.close(node, NodeTypes.ElementNode);
break;
}
case ConcreteNodeTypes.AttributeDoubleQuoted:
case ConcreteNodeTypes.AttributeSingleQuoted:
case ConcreteNodeTypes.AttributeUnquoted: {
const attributeNode: AttributeDoubleQuoted | AttributeSingleQuoted | AttributeUnquoted = {
type: node.type as unknown as
| NodeTypes.AttributeDoubleQuoted
| NodeTypes.AttributeSingleQuoted
| NodeTypes.AttributeUnquoted,
locStart: node.locStart,
locEnd: node.locEnd,
source: node.source,
name: cstToAST([node.name])[0] as TextNode,
value: toAttributeValue(node.value),
};
astBuilder.push(attributeNode);
break;
}
case ConcreteNodeTypes.AttributeEmpty: {
const attributeNode: AttributeEmpty = {
type: NodeTypes.AttributeEmpty,
locStart: node.locStart,
locEnd: node.locEnd,
source: node.source,
name: cstToAST([node.name])[0] as TextNode,
};
astBuilder.push(attributeNode);
break;
}
default: {
throw new UnknownConcreteNodeTypeError(
'',
(node as any)?.source,
(node as any)?.locStart,
(node as any)?.locEnd,
);
}
}
}
return astBuilder.finish();
}
/** Parses LiquidX source text into an AST (source → CST → AST). */
export default function sourceToAST(source: string): LiquidXNode[] {
  return cstToAST(sourceToCST(source));
}
| src/parser/2-cst-to-ast/index.ts | unshopable-liquidx-a101873 | [
{
"filename": "src/parser/1-source-to-cst/index.ts",
"retrieved_chunk": " name: ConcreteTextNode;\n} & ConcreteBasicNode<ConcreteNodeTypes.AttributeEmpty>;\nexport type CST = ConcreteNode[];\nexport type TemplateMapping = {\n type: ConcreteNodeTypes;\n locStart: (node: Node[]) => number;\n locEnd: (node: Node[]) => number;\n source: string;\n [k: string]: string | number | boolean | object | null;\n};",
"score": 30.90516587180572
},
{
"filename": "src/parser/1-source-to-cst/index.ts",
"retrieved_chunk": " type: ConcreteNodeTypes.AttributeDoubleQuoted,\n locStart,\n locEnd,\n source,\n name: 0,\n value: 3,\n },\n AttributeSingleQuoted: {\n type: ConcreteNodeTypes.AttributeSingleQuoted,\n locStart,",
"score": 26.906872294799168
},
{
"filename": "src/parser/1-source-to-cst/index.ts",
"retrieved_chunk": " locEnd,\n source,\n value: 2,\n },\n liquidDropValue: (node: Node) => node.sourceString.trimEnd(),\n ElementNode: 0,\n ElementOpeningTag: {\n type: ConcreteNodeTypes.ElementOpeningTag,\n locStart,\n locEnd,",
"score": 26.568088529187484
},
{
"filename": "src/parser/1-source-to-cst/index.ts",
"retrieved_chunk": " AttributeDoubleQuoted = 'AttributeDoubleQuoted',\n AttributeSingleQuoted = 'AttributeSingleQuoted',\n AttributeUnquoted = 'AttributeUnquoted',\n AttributeEmpty = 'AttributeEmpty',\n}\nexport type ConcreteNode =\n | ConcreteTextNode\n | ConcreteLiquidDropNode\n | ConcreteElementOpeningTagNode\n | ConcreteElementClosingTagNode",
"score": 25.419727883645656
},
{
"filename": "src/parser/1-source-to-cst/index.ts",
"retrieved_chunk": " | ConcreteAttributeSingleQuoted\n | ConcreteAttributeUnquoted\n | ConcreteAttributeEmpty;\nexport type ConcreteAttributeDoubleQuoted =\n {} & ConcreteAttributeNodeBase<ConcreteNodeTypes.AttributeDoubleQuoted>;\nexport type ConcreteAttributeSingleQuoted =\n {} & ConcreteAttributeNodeBase<ConcreteNodeTypes.AttributeSingleQuoted>;\nexport type ConcreteAttributeUnquoted =\n {} & ConcreteAttributeNodeBase<ConcreteNodeTypes.AttributeUnquoted>;\nexport type ConcreteAttributeEmpty = {",
"score": 25.384946214225906
}
] | typescript | astBuilder = new ASTBuilder(cst[0].source); |
import { Node } from 'ohm-js';
import { toAST } from 'ohm-js/extras';
import { CSTParsingError } from '../errors';
import grammar from '../grammar';
/** Discriminant tags for every CST node kind. */
export enum ConcreteNodeTypes {
  TextNode = 'TextNode',
  LiquidDropNode = 'LiquidDropNode',
  ElementOpeningTag = 'ElementOpeningTag',
  ElementClosingTag = 'ElementClosingTag',
  ElementSelfClosingTag = 'ElementSelfClosingTag',
  AttributeDoubleQuoted = 'AttributeDoubleQuoted',
  AttributeSingleQuoted = 'AttributeSingleQuoted',
  AttributeUnquoted = 'AttributeUnquoted',
  AttributeEmpty = 'AttributeEmpty',
}

/** Union of top-level CST node kinds (attributes live inside tag nodes). */
export type ConcreteNode =
  | ConcreteTextNode
  | ConcreteLiquidDropNode
  | ConcreteElementOpeningTagNode
  | ConcreteElementClosingTagNode
  | ConcreteElementSelfClosingTagNode;

/** Location and raw-source info shared by every CST node. */
export type ConcreteBasicNode<T> = {
  type: T;
  locStart: number;
  locEnd: number;
  source: string;
};

export type ConcreteTextNode = {
  value: string;
} & ConcreteBasicNode<ConcreteNodeTypes.TextNode>;

export type ConcreteLiquidDropNode = {
  value: string;
} & ConcreteBasicNode<ConcreteNodeTypes.LiquidDropNode>;

export type ConcreteElementOpeningTagNode = {
  name: string;
  attributes: ConcreteAttributeNode[];
} & ConcreteBasicNode<ConcreteNodeTypes.ElementOpeningTag>;

export type ConcreteElementClosingTagNode = {
  name: string;
} & ConcreteBasicNode<ConcreteNodeTypes.ElementClosingTag>;

export type ConcreteElementSelfClosingTagNode = {
  name: string;
  attributes: ConcreteAttributeNode[];
} & ConcreteBasicNode<ConcreteNodeTypes.ElementSelfClosingTag>;

/** Shared shape for attributes that carry a value. */
export type ConcreteAttributeNodeBase<T> = {
  name: ConcreteTextNode;
  value: ConcreteTextNode;
} & ConcreteBasicNode<T>;

export type ConcreteAttributeNode =
  | ConcreteAttributeDoubleQuoted
  | ConcreteAttributeSingleQuoted
  | ConcreteAttributeUnquoted
  | ConcreteAttributeEmpty;

export type ConcreteAttributeDoubleQuoted =
  {} & ConcreteAttributeNodeBase<ConcreteNodeTypes.AttributeDoubleQuoted>;
export type ConcreteAttributeSingleQuoted =
  {} & ConcreteAttributeNodeBase<ConcreteNodeTypes.AttributeSingleQuoted>;
export type ConcreteAttributeUnquoted =
  {} & ConcreteAttributeNodeBase<ConcreteNodeTypes.AttributeUnquoted>;
/** An attribute with a name but no value (e.g. `disabled`). */
export type ConcreteAttributeEmpty = {
  name: ConcreteTextNode;
} & ConcreteBasicNode<ConcreteNodeTypes.AttributeEmpty>;

export type CST = ConcreteNode[];

/**
 * Ohm `toAST` template entry: literal fields are copied as-is, numeric
 * fields index into the rule's children, function fields are computed per
 * match.
 */
export type TemplateMapping = {
  type: ConcreteNodeTypes;
  locStart: (node: Node[]) => number;
  locEnd: (node: Node[]) => number;
  source: string;
  [k: string]: string | number | boolean | object | null;
};

export type TopLevelFunctionMapping = (...nodes: Node[]) => any;

/** Grammar-rule name → toAST mapping entry. */
export type Mapping = {
  [k: string]: number | TemplateMapping | TopLevelFunctionMapping;
};
/** Start index (in the original source) of the first matched child node. */
function locStart(nodes: Node[]) {
  const [first] = nodes;
  return first.source.startIdx;
}
/** End index (in the original source) of the last matched child node. */
function locEnd(nodes: Node[]) {
  const last = nodes[nodes.length - 1];
  return last.source.endIdx;
}
export default function sourceToCST(source: string): ConcreteNode[] {
const matchResult = grammar.match(source);
if (matchResult.failed()) {
| throw new CSTParsingError(matchResult); |
}
const textNode = {
type: ConcreteNodeTypes.TextNode,
locStart,
locEnd,
value: function (this: Node) {
return this.sourceString;
},
source,
};
const mapping: Mapping = {
Node: 0,
TextNode: textNode,
liquidDropNode: {
type: ConcreteNodeTypes.LiquidDropNode,
locStart,
locEnd,
source,
value: 2,
},
liquidDropValue: (node: Node) => node.sourceString.trimEnd(),
ElementNode: 0,
ElementOpeningTag: {
type: ConcreteNodeTypes.ElementOpeningTag,
locStart,
locEnd,
name: 1,
attributes: 2,
source,
},
ElementClosingTag: {
type: ConcreteNodeTypes.ElementClosingTag,
locStart,
locEnd,
name: 1,
source,
},
ElementSelfClosingTag: {
type: ConcreteNodeTypes.ElementSelfClosingTag,
locStart,
locEnd,
name: 1,
attributes: 2,
source,
},
AttributeDoubleQuoted: {
type: ConcreteNodeTypes.AttributeDoubleQuoted,
locStart,
locEnd,
source,
name: 0,
value: 3,
},
AttributeSingleQuoted: {
type: ConcreteNodeTypes.AttributeSingleQuoted,
locStart,
locEnd,
source,
name: 0,
value: 3,
},
AttributeUnquoted: {
type: ConcreteNodeTypes.AttributeUnquoted,
locStart,
locEnd,
source,
name: 0,
value: 2,
},
AttributeEmpty: {
type: ConcreteNodeTypes.AttributeEmpty,
locStart,
locEnd,
source,
name: 0,
},
attributeName: textNode,
attributeDoubleQuotedValue: 0,
attributeSingleQuotedValue: 0,
attributeUnquotedValue: 0,
attributeDoubleQuotedTextNode: textNode,
attributeSingleQuotedTextNode: textNode,
attributeUnquotedTextNode: textNode,
};
const cst = toAST(matchResult, mapping) as ConcreteNode[];
return cst;
}
| src/parser/1-source-to-cst/index.ts | unshopable-liquidx-a101873 | [
{
"filename": "src/parser/errors.ts",
"retrieved_chunk": "}\nexport class CSTParsingError extends LoggableError {\n constructor(matchResult: ohm.MatchResult) {\n super({ result: matchResult.message ?? '' });\n this.name = 'CSTParsingError';\n }\n}\nexport class UnknownConcreteNodeTypeError extends LoggableError {\n constructor(message: string, source: string, locStart: number, locEnd: number) {\n super({ result: undefined, message, source, locStart, locEnd });",
"score": 35.83708974232662
},
{
"filename": "src/parser/2-cst-to-ast/index.ts",
"retrieved_chunk": " }\n return astBuilder.finish();\n}\nexport default function sourceToAST(source: string): LiquidXNode[] {\n const cst = sourceToCST(source);\n const ast = cstToAST(cst);\n return ast;\n}",
"score": 26.546773980359003
},
{
"filename": "src/parser/grammar/__tests__/grammar.test.ts",
"retrieved_chunk": " expectMatchSucceeded('Image />').toBe(false);\n expectMatchSucceeded('Image attr />').toBe(false);\n});\nfunction expectMatchSucceeded(source: string) {\n const match = grammar.match(source);\n return expect(match.succeeded());\n}",
"score": 26.446950389127164
},
{
"filename": "src/parser/2-cst-to-ast/index.ts",
"retrieved_chunk": " for (let i = 0; i < cst.length; i += 1) {\n const node = cst[i];\n const prevNode = cst[i - 1];\n // Add whitespaces and linebreaks that went missing after parsing. We don't need to do this\n // if the node is an attribute since whitespaces between attributes is not important to preserve.\n // In fact it would probably break the rendered output due to unexpected text nodes.\n // TODO: This should be handled in the grammar/source-to-cst part instead (if possible).\n if (prevNode?.source && !isAttributeNode(node)) {\n const diff = node.locStart - prevNode.locEnd;\n if (diff > 0) {",
"score": 23.892174544925474
},
{
"filename": "src/parser/2-cst-to-ast/ast-builder.ts",
"retrieved_chunk": " finish() {\n if (this.cursor.length > 0) {\n throw new ASTParsingError(\n `LiquidX element '${this.parent?.name}' has no corresponding closing tag.`,\n this.source,\n this.parent?.locStart ?? this.source.length - 1,\n this.parent?.locEnd ?? this.source.length,\n );\n }\n return this.ast;",
"score": 22.99906082787726
}
] | typescript | throw new CSTParsingError(matchResult); |
import sourceToCST, {
ConcreteAttributeNode,
ConcreteElementOpeningTagNode,
ConcreteElementSelfClosingTagNode,
ConcreteLiquidDropNode,
ConcreteNode,
ConcreteNodeTypes,
ConcreteTextNode,
} from '../1-source-to-cst';
import { UnknownConcreteNodeTypeError } from '../errors';
import ASTBuilder from './ast-builder';
/** Location and raw-source info shared by every AST node; `T` is the node's type tag. */
export type BasicNode<T> = {
  type: T;
  locStart: number;
  locEnd: number;
  source: string;
};

/** Discriminant tags for every AST node kind. */
export enum NodeTypes {
  TextNode = 'TextNode',
  LiquidDropNode = 'LiquidDropNode',
  ElementNode = 'ElementNode',
  AttributeDoubleQuoted = 'AttributeDoubleQuoted',
  AttributeSingleQuoted = 'AttributeSingleQuoted',
  AttributeUnquoted = 'AttributeUnquoted',
  AttributeEmpty = 'AttributeEmpty',
}

/** Plain text between elements and drops. */
export type TextNode = {
  value: string;
} & BasicNode<NodeTypes.TextNode>;

/** A Liquid output drop; `value` holds the drop's inner expression text. */
export type LiquidDropNode = {
  value: string;
} & BasicNode<NodeTypes.LiquidDropNode>;

/** Union of every node kind that can appear in the AST. */
export type LiquidXNode = TextNode | LiquidDropNode | ElementNode | AttributeNode;

/** A LiquidX element with its attributes and (possibly empty) children. */
export type ElementNode = {
  name: string;
  source: string;
  attributes: AttributeNode[];
  children: LiquidXNode[];
} & BasicNode<NodeTypes.ElementNode>;

/** Union of the four attribute flavors. */
export type AttributeNode =
  | AttributeDoubleQuoted
  | AttributeSingleQuoted
  | AttributeUnquoted
  | AttributeEmpty;

/** Shared shape for attributes that carry a value. */
export type AttributeNodeBase<T> = {
  name: TextNode;
  value: TextNode | LiquidDropNode;
} & BasicNode<T>;

export type AttributeDoubleQuoted = {} & AttributeNodeBase<NodeTypes.AttributeDoubleQuoted>;
export type AttributeSingleQuoted = {} & AttributeNodeBase<NodeTypes.AttributeSingleQuoted>;
export type AttributeUnquoted = {} & AttributeNodeBase<NodeTypes.AttributeUnquoted>;
/** An attribute with a name but no value (e.g. `disabled`). */
export type AttributeEmpty = { name: TextNode } & BasicNode<NodeTypes.AttributeEmpty>;
/** Converts a concrete text node into its AST counterpart. */
function toTextNode(node: ConcreteTextNode): TextNode {
  const { locStart, locEnd, source, value } = node;
  return { type: NodeTypes.TextNode, locStart, locEnd, source, value };
}
/** Converts a concrete Liquid drop node into its AST counterpart. */
function toLiquidDropNode(node: ConcreteLiquidDropNode): LiquidDropNode {
  const { locStart, locEnd, source, value } = node;
  return { type: NodeTypes.LiquidDropNode, locStart, locEnd, source, value };
}
function toElementNode(
node: ConcreteElementOpeningTagNode | ConcreteElementSelfClosingTagNode,
): ElementNode {
return {
type: NodeTypes.ElementNode,
locStart: node.locStart,
locEnd: node.locEnd,
name: node.name,
source: node.source,
attributes: toAttributes(node.attributes),
children: [],
};
}
/** Converts a list of concrete attribute nodes into AST attribute nodes. */
function toAttributes(attributes: ConcreteAttributeNode[]) {
  const attributeAST = cstToAST(attributes);
  return attributeAST as AttributeNode[];
}
/** Converts a concrete attribute value (text or Liquid drop) into its AST form. */
function toAttributeValue(value: ConcreteTextNode | ConcreteLiquidDropNode) {
  const [astValue] = cstToAST([value]);
  return astValue as TextNode | LiquidDropNode;
}
function isAttributeNode(node: any): boolean {
return (
node.type === ConcreteNodeTypes.AttributeDoubleQuoted ||
node.type === ConcreteNodeTypes.AttributeSingleQuoted ||
node.type === ConcreteNodeTypes.AttributeUnquoted ||
node.type === ConcreteNodeTypes.AttributeEmpty
);
}
function cstToAST(cst: ConcreteNode[] | ConcreteAttributeNode[]) {
if (cst.length === 0) return [];
const astBuilder = new ASTBuilder(cst[0].source);
for (let i = 0; i < cst.length; i += 1) {
const node = cst[i];
const prevNode = cst[i - 1];
// Add whitespaces and linebreaks that went missing after parsing. We don't need to do this
// if the node is an attribute since whitespaces between attributes is not important to preserve.
// In fact it would probably break the rendered output due to unexpected text nodes.
// TODO: This should be handled in the grammar/source-to-cst part instead (if possible).
if (prevNode?.source && !isAttributeNode(node)) {
const diff = node.locStart - prevNode.locEnd;
if (diff > 0) {
astBuilder.push(
toTextNode({
type: ConcreteNodeTypes.TextNode,
locStart: prevNode.locEnd,
locEnd: node.locStart,
source: node.source,
value: prevNode.source.slice(prevNode.locEnd, node.locStart),
}),
);
}
}
switch (node.type) {
case ConcreteNodeTypes.TextNode: {
astBuilder.push(toTextNode(node));
break;
}
case ConcreteNodeTypes.LiquidDropNode: {
astBuilder.push(toLiquidDropNode(node));
break;
}
case ConcreteNodeTypes.ElementOpeningTag: {
astBuilder.open(toElementNode(node));
break;
}
case ConcreteNodeTypes.ElementClosingTag: {
| astBuilder.close(node, NodeTypes.ElementNode); |
break;
}
case ConcreteNodeTypes.ElementSelfClosingTag: {
astBuilder.open(toElementNode(node));
astBuilder.close(node, NodeTypes.ElementNode);
break;
}
case ConcreteNodeTypes.AttributeDoubleQuoted:
case ConcreteNodeTypes.AttributeSingleQuoted:
case ConcreteNodeTypes.AttributeUnquoted: {
const attributeNode: AttributeDoubleQuoted | AttributeSingleQuoted | AttributeUnquoted = {
type: node.type as unknown as
| NodeTypes.AttributeDoubleQuoted
| NodeTypes.AttributeSingleQuoted
| NodeTypes.AttributeUnquoted,
locStart: node.locStart,
locEnd: node.locEnd,
source: node.source,
name: cstToAST([node.name])[0] as TextNode,
value: toAttributeValue(node.value),
};
astBuilder.push(attributeNode);
break;
}
case ConcreteNodeTypes.AttributeEmpty: {
const attributeNode: AttributeEmpty = {
type: NodeTypes.AttributeEmpty,
locStart: node.locStart,
locEnd: node.locEnd,
source: node.source,
name: cstToAST([node.name])[0] as TextNode,
};
astBuilder.push(attributeNode);
break;
}
default: {
throw new UnknownConcreteNodeTypeError(
'',
(node as any)?.source,
(node as any)?.locStart,
(node as any)?.locEnd,
);
}
}
}
return astBuilder.finish();
}
/** Parses LiquidX source text into an AST (source → CST → AST). */
export default function sourceToAST(source: string): LiquidXNode[] {
  return cstToAST(sourceToCST(source));
}
| src/parser/2-cst-to-ast/index.ts | unshopable-liquidx-a101873 | [
{
"filename": "src/renderer/index.ts",
"retrieved_chunk": " case NodeTypes.ElementNode: {\n output += renderElement(node, { withSource, isChildOfElementNode });\n break;\n }\n case NodeTypes.AttributeDoubleQuoted:\n case NodeTypes.AttributeSingleQuoted:\n case NodeTypes.AttributeUnquoted: {\n const name = renderText(node.name);\n let value = null;\n if (node.value.type === NodeTypes.TextNode) {",
"score": 39.164371518974754
},
{
"filename": "src/renderer/index.ts",
"retrieved_chunk": " value = JSON.stringify(renderText(node.value));\n } else {\n value = renderLiquidDrop(node.value);\n }\n output += `${name}: ${value}`;\n break;\n }\n case NodeTypes.AttributeEmpty: {\n const name = renderText(node.name);\n const value = true;",
"score": 29.028674838725433
},
{
"filename": "src/renderer/index.ts",
"retrieved_chunk": " { withSource = false, isChildOfElementNode = false } = {},\n): string {\n let output = '';\n for (let i = 0; i < ast.length; i += 1) {\n const node = ast[i];\n switch (node.type) {\n case NodeTypes.TextNode: {\n output += renderText(node);\n break;\n }",
"score": 27.52707912452614
},
{
"filename": "src/parser/1-source-to-cst/index.ts",
"retrieved_chunk": " locEnd,\n source,\n value: 2,\n },\n liquidDropValue: (node: Node) => node.sourceString.trimEnd(),\n ElementNode: 0,\n ElementOpeningTag: {\n type: ConcreteNodeTypes.ElementOpeningTag,\n locStart,\n locEnd,",
"score": 23.512371958926185
},
{
"filename": "src/parser/2-cst-to-ast/ast-builder.ts",
"retrieved_chunk": " return (this.current || []).length - 1;\n }\n get parent(): ElementNode | undefined {\n if (this.cursor.length == 0) return undefined;\n return deepGet<ElementNode>(dropLast(1, this.cursor), this.ast);\n }\n open(node: ElementNode) {\n this.push(node);\n this.cursor.push(this.currentPosition);\n this.cursor.push('children');",
"score": 19.630724137581392
}
] | typescript | astBuilder.close(node, NodeTypes.ElementNode); |
import { camelCase } from 'lodash';
import { type Request, type IProcedureResult, type IResult, type IRecordSet } from 'mssql';
import { type GraphQLResolveInfo } from 'graphql';
import {
type DriverType,
type PreparedStoredProcedureParameter,
ParameterMode,
type StoredProcedureSchema,
type StoredProcedureParameter,
type ILogger,
type InputParameters,
} from '../types';
import { mapDbTypeToDriverType, replacer } from '../utils';
import { logExecutionBegin, logPerformance, logSafely } from '../logging';
import {
type StoredProcedureCacheManager,
type StoredProcedureMetadataManager,
} from '../stored-procedure';
import { type IResolverProcedureResult } from '../types/i-resolver-procedure-result';
import { getNodeSelectionSetNames, getFieldNamesExcludingNode } from '../utils/graphql-helper';
/**
* StoredProcedureManager provides methods to interact
* with a Microsoft SQL Server database for managing stored procedures.
*/
export class StoredProcedureManager {
/**
 * Creates a new instance of StoredProcedureManager.
 * @param _storedProcedureCacheManager - Caches previously fetched stored procedure schemas.
 * @param _storedProcedureMetadataManager - Retrieves and parses stored procedure metadata from the database.
 */
constructor(
  private readonly _storedProcedureCacheManager: StoredProcedureCacheManager,
  private readonly _storedProcedureMetadataManager: StoredProcedureMetadataManager,
) {}
/**
 * Executes a stored procedure with the provided input parameters, and returns the result.
 * @template T - The type of records in the result set.
 * @param {string} storedProcedureName - The name of the stored procedure to execute.
 * @param {InputParameters} input - The input parameters for the stored procedure.
 * @param {Request} request - The request to execute the stored procedure.
 * @param {ILogger} logger - The logger to use for logging.
 * @param {GraphQLResolveInfo | undefined} info - If provided, will be used to case-insensitively map the stored
 * procedure results to the correct schema field names.
 * @returns A Promise that resolves to the result of the stored procedure execution.
 */
public async executeStoredProcedure<T>(
  storedProcedureName: string,
  input: InputParameters,
  request: Request,
  logger: ILogger,
  info?: GraphQLResolveInfo,
): Promise<IResolverProcedureResult<T>> {
  let startTime = performance.now();

  // The parameter schema is expensive to fetch from the DB, so consult the cache first.
  let schema = (await this._storedProcedureCacheManager.tryGetFromCache(storedProcedureName)) as
    | IResult<StoredProcedureSchema>
    | undefined;
  if (schema === undefined) {
    logSafely(
      logger,
      'info',
      // Yellow
      `\x1b[33mCache miss occurred while retrieving the cached schema for ${storedProcedureName}\x1b[0m`,
    );
    schema = await this._storedProcedureMetadataManager.getStoredProcedureParameterSchema(
      storedProcedureName,
      logger,
    );
    // Fix: this statement was corrupted by stray '|' tokens in the original.
    await this._storedProcedureCacheManager.addToCache(storedProcedureName, schema);
  } else {
    logSafely(
      logger,
      'info',
      // Green
      `\x1b[32mCache hit occurred while retrieving the cached schema for ${storedProcedureName}\x1b[0m`,
    );
  }
  logPerformance(logger, 'getStoredProcedureParameterSchema', startTime);

  startTime = performance.now();
  const storedProcedureParameters =
    this._storedProcedureMetadataManager.parseStoredProcedureParameters(
      storedProcedureName,
      schema,
    );
  logPerformance(logger, 'parseStoredProcedureParameters', startTime);

  startTime = performance.now();
  const preparedRequest = this.prepareStoredProcedureRequest(
    storedProcedureParameters,
    input,
    request,
  );
  logPerformance(logger, 'prepareStoredProcedureRequest', startTime);

  startTime = performance.now();
  logExecutionBegin(
    logger,
    `Stored Procedure ${storedProcedureName} with parameters`,
    preparedRequest.parameters,
    // Green
    '32m',
  );
  const result = await preparedRequest.execute(storedProcedureName);

  // NOTE(review): the duration of the execute call itself is never passed to
  // logPerformance — presumably logged elsewhere (logExecutionBegin/End); confirm.
  startTime = performance.now();
  const preparedResult = this.prepareStoredProcedureResult(result, info);
  logPerformance(logger, 'prepareStoredProcedureResult', startTime);

  return preparedResult;
}
private prepareParameters(
storedProcedureParameters: IterableIterator<StoredProcedureParameter>,
input: InputParameters,
): Map<string, PreparedStoredProcedureParameter> {
// We want to use the inferred DB Stored Procedure schema as the source of truth.
const preparedParameters = new Map<string, PreparedStoredProcedureParameter>();
for (const spParameter of storedProcedureParameters) {
const { name, type, length, precision, scale, ...rest } = spParameter;
const parameterName = name.slice(1);
// Let's use the parameter name in lowercase as the lookup key.
preparedParameters.set(parameterName.toLowerCase(), {
name: parameterName,
type: mapDbTypeToDriverType({
type,
length,
precision,
scale,
}) as DriverType,
value: undefined,
...rest,
});
}
// Populate our input values into the request parameters.
const inputParameters = input as Record<string, unknown>;
for (const inputParameterKey in inputParameters) {
const preparedParameter = preparedParameters.get(inputParameterKey.toLowerCase());
if (preparedParameter != null) {
preparedParameter.value = inputParameters[inputParameterKey];
}
// We don't care about provided input parameters that are missing in the Stored Procedure definition.
}
return preparedParameters;
}
private getMissingRequiredParameters(
parameters: Map<string, PreparedStoredProcedureParameter>,
): PreparedStoredProcedureParameter[] {
// Check what required parameters are missing.
const missingRequiredParameters = [];
for (const parameter of parameters.values()) {
// If they have a default value they can be ommitted from the request.
if (parameter.defaultValue === undefined && parameter.value === undefined) {
missingRequiredParameters.push(parameter);
}
}
return missingRequiredParameters;
}
private addParametersToRequest(
parameters: Map<string, PreparedStoredProcedureParameter>,
request: Request,
): Request {
const preparedRequest = request;
for (const parameter of parameters.values()) {
const { name, type, mode, value, defaultValue } = parameter;
if (defaultValue !== undefined && value === undefined) {
continue;
}
const modeEnum = mode;
if (modeEnum === ParameterMode.IN) {
preparedRequest.input(name, type, value);
} else if (modeEnum === ParameterMode.INOUT) {
preparedRequest.output(name, type, value);
} else {
throw new Error(`Unknown parameter mode: ${mode}`);
}
}
return preparedRequest;
}
/**
* Prepares the stored procedure request.
* @param {IterableIterator<StoredProcedureParameter>} storedProcedureParameters - The stored procedure parameters.
* @param {StoredProcedureInput} input - The input object.
* @param {Request} request - The request object.
* @returns A prepared request object.
*/
private prepareStoredProcedureRequest(
storedProcedureParameters: IterableIterator<StoredProcedureParameter>,
input: InputParameters,
request: Request,
): Request {
const parameters = this.prepareParameters(storedProcedureParameters, input);
const missingRequiredParameters = this.getMissingRequiredParameters(parameters);
const missingLength = missingRequiredParameters.length;
if (missingLength > 0) {
throw new Error(
`Missing ${missingLength} required parameters: ${missingRequiredParameters
.map((param) => JSON.stringify(param, replacer, 0))
.join(', ')}.`,
);
}
const preparedRequest = this.addParametersToRequest(parameters, request);
return preparedRequest;
}
/**
* Maps the keys of an object based on the provided mapping.
* @template T - The type of the original object.
* @param {T} obj - The object whose keys need to be mapped.
* @param {Record<string, string>} mapping - A dictionary containing the mapping of the original keys to the new keys.
* @returns {T} A new object with the keys mapped according to the provided mapping.
*/
private mapKeysWithMapping<T extends Record<string, unknown>>(
obj: T,
mapping: Record<string, string>,
): T {
const result: Record<string, unknown> = {};
for (const key in obj) {
const mappedKey = mapping[key.toLowerCase()] ?? camelCase(key);
result[mappedKey] = obj[key];
}
return result as T;
}
/**
* Prepares the stored procedure result into a GraphQL result object.
* @param {IProcedureResult} result - The stored procedure result.
* @param {GraphQLResolveInfo | undefined} info - If provided, will be used to case-insensitively map the stored
* procedure results to the correct schema field names.
* @returns {IResolverProcedureResult} A prepared GraphQL result object.
*/
private prepareStoredProcedureResult<T extends Record<string, unknown>>(
result: IProcedureResult<T>,
info?: GraphQLResolveInfo,
): IResolverProcedureResult<T> {
const { resultSetFields, outputFields } =
info !== undefined
? {
resultSetFields: getNodeSelectionSetNames(info, 'resultSets'),
outputFields: getFieldNamesExcludingNode(info, 'resultSets'),
}
: { resultSetFields: {}, outputFields: {} };
const resultSets = result.recordsets.map((recordset: IRecordSet<Record<string, unknown>>) => {
return recordset.map((record: Record<string, unknown>) =>
this.mapKeysWithMapping(record, resultSetFields),
);
});
const output = this.mapKeysWithMapping(result.output, outputFields);
const preparedResult = {
returnValue: result.returnValue,
resultSets: resultSets as T[][],
rowsAffected: result.rowsAffected,
...output,
};
return preparedResult;
}
}
| src/lib/stored-procedure/stored-procedure-manager.ts | Falven-mssql-data-source-bca6621 | [
{
"filename": "src/lib/datasource/mssql-datasource.ts",
"retrieved_chunk": " storedProcedureName: string,\n input: InputParameters,\n info?: GraphQLResolveInfo,\n ): Promise<IResolverProcedureResult<T>> {\n const startTime = performance.now();\n const logger = this._mutationLogger;\n logExecutionBegin(logger, `Stored Procedure Mutation ${storedProcedureName}`, input);\n const result = await this._databaseExecutor.executeMutationRequest(\n async (request: Request): Promise<IResolverProcedureResult<T>> =>\n await this._storedProcedureManager.executeStoredProcedure(",
"score": 23.002409226004417
},
{
"filename": "src/lib/datasource/mssql-datasource.ts",
"retrieved_chunk": " info?: GraphQLResolveInfo,\n ): Promise<IResolverProcedureResult<T>> {\n const startTime = performance.now();\n const logger = this._queryLogger;\n logExecutionBegin(logger, `Stored Procedure Query ${storedProcedureName} with inputs`, input);\n const result = await this._databaseExecutor.executeQueryRequest(\n async (request: Request): Promise<IResolverProcedureResult<T>> =>\n await this._storedProcedureManager.executeStoredProcedure<T>(\n storedProcedureName,\n input,",
"score": 22.80923909513485
},
{
"filename": "src/lib/stored-procedure/stored-procedure-metadata-manager.ts",
"retrieved_chunk": " private static readonly parameterDefinitionRegex = /(@[\\w]+)\\s+([^\\s]+)\\s*=\\s*([^, ]*),?/gi;\n constructor(private readonly _databaseExecutor: DatabaseExecutor) {}\n /**\n * Parses the stored procedure parameter schema into a StoredProcedureParameter array.\n * @param {string} storedProcedureName - The name of the stored procedure to retrieve the parameter schema for.\n * @returns A Promise that resolves to the result of the stored procedure execution.\n */\n public async getStoredProcedureParameterSchema(\n storedProcedureName: string,\n logger: ILogger,",
"score": 21.198764568317102
},
{
"filename": "src/lib/stored-procedure/stored-procedure-metadata-manager.ts",
"retrieved_chunk": " ): Promise<IResult<StoredProcedureSchema>> {\n return await this._databaseExecutor.executeQueryRequest(async (request: Request) => {\n // Remove square bracket notation if any, and split into schema and name.\n const schemaAndName = storedProcedureName.replace(/\\[|\\]/g, '').split('.');\n const result = await request.query<StoredProcedureSchema>(\n 'SELECT ' +\n 'PARAMETER_NAME as name, ' +\n 'DATA_TYPE as type, ' +\n 'PARAMETER_MODE as mode, ' +\n 'CHARACTER_MAXIMUM_LENGTH length, ' +",
"score": 20.996380089513238
},
{
"filename": "src/lib/stored-procedure/stored-procedure-metadata-manager.ts",
"retrieved_chunk": " /**\n * Parses the stored procedure parameter schema into a StoredProcedureParameter array.\n * @param {string} storedProcedureName - The name of the stored procedure to parse the parameter schema for.\n * @param {IResult<StoredProcedureSchema>} schemaResult - The result of the stored procedure parameter schema query.\n * @returns A StoredProcedureParameter array.\n */\n public parseStoredProcedureParameters(\n storedProcedureName: string,\n schemaResult: IResult<StoredProcedureSchema>,\n ): IterableIterator<StoredProcedureParameter> {",
"score": 20.295902300498973
}
] | typescript | this._storedProcedureCacheManager.addToCache(storedProcedureName, schema); |
import sourceToCST, {
ConcreteAttributeNode,
ConcreteElementOpeningTagNode,
ConcreteElementSelfClosingTagNode,
ConcreteLiquidDropNode,
ConcreteNode,
ConcreteNodeTypes,
ConcreteTextNode,
} from '../1-source-to-cst';
import { UnknownConcreteNodeTypeError } from '../errors';
import ASTBuilder from './ast-builder';
export type BasicNode<T> = {
type: T;
locStart: number;
locEnd: number;
source: string;
};
export enum NodeTypes {
TextNode = 'TextNode',
LiquidDropNode = 'LiquidDropNode',
ElementNode = 'ElementNode',
AttributeDoubleQuoted = 'AttributeDoubleQuoted',
AttributeSingleQuoted = 'AttributeSingleQuoted',
AttributeUnquoted = 'AttributeUnquoted',
AttributeEmpty = 'AttributeEmpty',
}
export type TextNode = {
value: string;
} & BasicNode<NodeTypes.TextNode>;
export type LiquidDropNode = {
value: string;
} & BasicNode<NodeTypes.LiquidDropNode>;
export type LiquidXNode = TextNode | LiquidDropNode | ElementNode | AttributeNode;
export type ElementNode = {
name: string;
source: string;
attributes: AttributeNode[];
children: LiquidXNode[];
} & BasicNode<NodeTypes.ElementNode>;
export type AttributeNode =
| AttributeDoubleQuoted
| AttributeSingleQuoted
| AttributeUnquoted
| AttributeEmpty;
export type AttributeNodeBase<T> = {
name: TextNode;
value: TextNode | LiquidDropNode;
} & BasicNode<T>;
export type AttributeDoubleQuoted = {} & AttributeNodeBase<NodeTypes.AttributeDoubleQuoted>;
export type AttributeSingleQuoted = {} & AttributeNodeBase<NodeTypes.AttributeSingleQuoted>;
export type AttributeUnquoted = {} & AttributeNodeBase<NodeTypes.AttributeUnquoted>;
export type AttributeEmpty = { name: TextNode } & BasicNode<NodeTypes.AttributeEmpty>;
function toTextNode(node: ConcreteTextNode): TextNode {
return {
type: NodeTypes.TextNode,
locStart: node.locStart,
locEnd: node.locEnd,
source: node.source,
value: node.value,
};
}
function toLiquidDropNode(node: ConcreteLiquidDropNode): LiquidDropNode {
return {
type: NodeTypes.LiquidDropNode,
locStart: node.locStart,
locEnd: node.locEnd,
source: node.source,
value: node.value,
};
}
function toElementNode(
node: ConcreteElementOpeningTagNode | ConcreteElementSelfClosingTagNode,
): ElementNode {
return {
type: NodeTypes.ElementNode,
locStart: node.locStart,
locEnd: node.locEnd,
name: node.name,
source: node.source,
attributes: toAttributes(node.attributes),
children: [],
};
}
function toAttributes(attributes: ConcreteAttributeNode[]) {
return cstToAST(attributes) as AttributeNode[];
}
function toAttributeValue(value: ConcreteTextNode | ConcreteLiquidDropNode) {
return cstToAST([value])[0] as TextNode | LiquidDropNode;
}
function isAttributeNode(node: any): boolean {
return (
node.type === ConcreteNodeTypes.AttributeDoubleQuoted ||
node.type === ConcreteNodeTypes.AttributeSingleQuoted ||
node.type === ConcreteNodeTypes.AttributeUnquoted ||
node.type === ConcreteNodeTypes.AttributeEmpty
);
}
function cstToAST(cst: ConcreteNode[] | ConcreteAttributeNode[]) {
if (cst.length === 0) return [];
const astBuilder = new | ASTBuilder(cst[0].source); |
for (let i = 0; i < cst.length; i += 1) {
const node = cst[i];
const prevNode = cst[i - 1];
// Add whitespaces and linebreaks that went missing after parsing. We don't need to do this
// if the node is an attribute since whitespaces between attributes is not important to preserve.
// In fact it would probably break the rendered output due to unexpected text nodes.
// TODO: This should be handled in the grammar/source-to-cst part instead (if possible).
if (prevNode?.source && !isAttributeNode(node)) {
const diff = node.locStart - prevNode.locEnd;
if (diff > 0) {
astBuilder.push(
toTextNode({
type: ConcreteNodeTypes.TextNode,
locStart: prevNode.locEnd,
locEnd: node.locStart,
source: node.source,
value: prevNode.source.slice(prevNode.locEnd, node.locStart),
}),
);
}
}
switch (node.type) {
case ConcreteNodeTypes.TextNode: {
astBuilder.push(toTextNode(node));
break;
}
case ConcreteNodeTypes.LiquidDropNode: {
astBuilder.push(toLiquidDropNode(node));
break;
}
case ConcreteNodeTypes.ElementOpeningTag: {
astBuilder.open(toElementNode(node));
break;
}
case ConcreteNodeTypes.ElementClosingTag: {
astBuilder.close(node, NodeTypes.ElementNode);
break;
}
case ConcreteNodeTypes.ElementSelfClosingTag: {
astBuilder.open(toElementNode(node));
astBuilder.close(node, NodeTypes.ElementNode);
break;
}
case ConcreteNodeTypes.AttributeDoubleQuoted:
case ConcreteNodeTypes.AttributeSingleQuoted:
case ConcreteNodeTypes.AttributeUnquoted: {
const attributeNode: AttributeDoubleQuoted | AttributeSingleQuoted | AttributeUnquoted = {
type: node.type as unknown as
| NodeTypes.AttributeDoubleQuoted
| NodeTypes.AttributeSingleQuoted
| NodeTypes.AttributeUnquoted,
locStart: node.locStart,
locEnd: node.locEnd,
source: node.source,
name: cstToAST([node.name])[0] as TextNode,
value: toAttributeValue(node.value),
};
astBuilder.push(attributeNode);
break;
}
case ConcreteNodeTypes.AttributeEmpty: {
const attributeNode: AttributeEmpty = {
type: NodeTypes.AttributeEmpty,
locStart: node.locStart,
locEnd: node.locEnd,
source: node.source,
name: cstToAST([node.name])[0] as TextNode,
};
astBuilder.push(attributeNode);
break;
}
default: {
throw new UnknownConcreteNodeTypeError(
'',
(node as any)?.source,
(node as any)?.locStart,
(node as any)?.locEnd,
);
}
}
}
return astBuilder.finish();
}
export default function sourceToAST(source: string): LiquidXNode[] {
const cst = sourceToCST(source);
const ast = cstToAST(cst);
return ast;
}
| src/parser/2-cst-to-ast/index.ts | unshopable-liquidx-a101873 | [
{
"filename": "src/parser/1-source-to-cst/index.ts",
"retrieved_chunk": " name: ConcreteTextNode;\n} & ConcreteBasicNode<ConcreteNodeTypes.AttributeEmpty>;\nexport type CST = ConcreteNode[];\nexport type TemplateMapping = {\n type: ConcreteNodeTypes;\n locStart: (node: Node[]) => number;\n locEnd: (node: Node[]) => number;\n source: string;\n [k: string]: string | number | boolean | object | null;\n};",
"score": 30.90516587180572
},
{
"filename": "src/parser/1-source-to-cst/index.ts",
"retrieved_chunk": " type: ConcreteNodeTypes.AttributeDoubleQuoted,\n locStart,\n locEnd,\n source,\n name: 0,\n value: 3,\n },\n AttributeSingleQuoted: {\n type: ConcreteNodeTypes.AttributeSingleQuoted,\n locStart,",
"score": 26.906872294799168
},
{
"filename": "src/parser/1-source-to-cst/index.ts",
"retrieved_chunk": " locEnd,\n source,\n value: 2,\n },\n liquidDropValue: (node: Node) => node.sourceString.trimEnd(),\n ElementNode: 0,\n ElementOpeningTag: {\n type: ConcreteNodeTypes.ElementOpeningTag,\n locStart,\n locEnd,",
"score": 26.568088529187484
},
{
"filename": "src/parser/1-source-to-cst/index.ts",
"retrieved_chunk": " AttributeDoubleQuoted = 'AttributeDoubleQuoted',\n AttributeSingleQuoted = 'AttributeSingleQuoted',\n AttributeUnquoted = 'AttributeUnquoted',\n AttributeEmpty = 'AttributeEmpty',\n}\nexport type ConcreteNode =\n | ConcreteTextNode\n | ConcreteLiquidDropNode\n | ConcreteElementOpeningTagNode\n | ConcreteElementClosingTagNode",
"score": 25.419727883645656
},
{
"filename": "src/parser/1-source-to-cst/index.ts",
"retrieved_chunk": " | ConcreteAttributeSingleQuoted\n | ConcreteAttributeUnquoted\n | ConcreteAttributeEmpty;\nexport type ConcreteAttributeDoubleQuoted =\n {} & ConcreteAttributeNodeBase<ConcreteNodeTypes.AttributeDoubleQuoted>;\nexport type ConcreteAttributeSingleQuoted =\n {} & ConcreteAttributeNodeBase<ConcreteNodeTypes.AttributeSingleQuoted>;\nexport type ConcreteAttributeUnquoted =\n {} & ConcreteAttributeNodeBase<ConcreteNodeTypes.AttributeUnquoted>;\nexport type ConcreteAttributeEmpty = {",
"score": 25.384946214225906
}
] | typescript | ASTBuilder(cst[0].source); |
import { type IResult, type Request } from 'mssql';
import type { StoredProcedureParameter, StoredProcedureSchema, ILogger } from '../types';
import { type DatabaseExecutor } from '../executor';
import { convertSqlValueToJsValue } from '../utils';
/**
* A manager for stored procedure metadata.
* Handles the retrieval and caching of stored procedure metadata.
*/
export class StoredProcedureMetadataManager {
/**
* Regular expression to extract MSSQL stored procedure names.
* See https://regex101.com/r/cMsTyT/1 for this regex.
*/
private static readonly storedProcedureNameRegex =
/((?:(?:\[([\w\s]+)\]|(\w+))\.)?(?:\[([\w\s]+)\]|(\w+))\.(?:\[([\w\s]+)\]|(\w+)))/i;
/**
* Matches any comments from the Stored Procedure definition.
* See https://regex101.com/r/dxA7n0/1 for this regex.
*/
private static readonly commentRegex = /(?:\s*-{2}.+\s*$)|(?:\/\*([\s\S]*?)\*\/)/gm;
/**
* Matches the parameters from the Stored Procedure definition.
* See https://regex101.com/r/4TaTky/1 for this regex.
*/
private static readonly parameterSectionRegex =
/(?<=(?:CREATE|ALTER)\s+PROCEDURE)\s+((?:(?:\[([\w\s]+)\]|(\w+))\.)?(?:\[([\w\s]+)\]|(\w+))\.(?:\[([\w\s]+)\]|(\w+)))(.*?)(?=(?:AS|FOR\s+REPLICATION)[^\w])/is;
/**
* See https://regex101.com/r/iMEaLb/1 for this regex.
* Match the individual parameters in the Parameter Definition.
*/
private static readonly parameterDefinitionRegex = /(@[\w]+)\s+([^\s]+)\s*=\s*([^, ]*),?/gi;
constructor(private readonly _databaseExecutor: DatabaseExecutor) {}
/**
* Parses the stored procedure parameter schema into a StoredProcedureParameter array.
* @param {string} storedProcedureName - The name of the stored procedure to retrieve the parameter schema for.
* @returns A Promise that resolves to the result of the stored procedure execution.
*/
public async getStoredProcedureParameterSchema(
storedProcedureName: string,
logger: ILogger,
): Promise<IResult<StoredProcedureSchema>> {
return await this._databaseExecutor.executeQueryRequest(async (request: Request) => {
// Remove square bracket notation if any, and split into schema and name.
const schemaAndName = storedProcedureName.replace(/\[|\]/g, '').split('.');
const result = await request.query<StoredProcedureSchema>(
'SELECT ' +
'PARAMETER_NAME as name, ' +
'DATA_TYPE as type, ' +
'PARAMETER_MODE as mode, ' +
'CHARACTER_MAXIMUM_LENGTH length, ' +
'NUMERIC_PRECISION as precision, ' +
'NUMERIC_SCALE as scale ' +
'FROM INFORMATION_SCHEMA.PARAMETERS ' +
`WHERE SPECIFIC_SCHEMA = '${schemaAndName[0]}' AND SPECIFIC_NAME = '${schemaAndName[1]}';
SELECT OBJECT_DEFINITION(OBJECT_ID('${storedProcedureName}')) AS storedProcedureDefinition;`,
);
const recordSetLength = result.recordsets.length as number;
if (recordSetLength < 1 || recordSetLength > 2) {
throw new Error(
`Could not retrieve stored procedure parameter schema from Database for stored procedure ${storedProcedureName}.`,
);
}
if (recordSetLength !== 2 || result.recordsets[1].length !== 1) {
throw new Error(
`Could not retrieve stored procedure definition from Database for stored procedure ${storedProcedureName}.`,
);
}
return result;
}, logger);
}
/**
* Parses the stored procedure parameter schema into a StoredProcedureParameter array.
* @param {string} storedProcedureName - The name of the stored procedure to parse the parameter schema for.
* @param {IResult<StoredProcedureSchema>} schemaResult - The result of the stored procedure parameter schema query.
* @returns A StoredProcedureParameter array.
*/
public parseStoredProcedureParameters(
storedProcedureName: string,
schemaResult: IResult<StoredProcedureSchema>,
): IterableIterator<StoredProcedureParameter> {
const parameterSchemaMap: Map<string, StoredProcedureParameter> =
schemaResult.recordsets[0].reduce(
(parameterMap: Map<string, StoredProcedureParameter>, item: StoredProcedureParameter) => {
parameterMap.set(item.name, item);
return parameterMap;
},
new Map<string, StoredProcedureParameter>(),
);
const storedProcedureDefinition = schemaResult.recordsets[1][0].storedProcedureDefinition;
if (storedProcedureDefinition == null) {
throw new Error(
`Could not parse stored procedure definition for stored procedure ${storedProcedureName}.`,
);
}
const commentStrippedStoredProcedureDefinition = storedProcedureDefinition.replace(
StoredProcedureMetadataManager.commentRegex,
'',
);
if (commentStrippedStoredProcedureDefinition === '') {
throw new Error(
`Could not parse stored procedure comments from definition for stored procedure ${storedProcedureName}.`,
);
}
const parameterSection = commentStrippedStoredProcedureDefinition.match(
StoredProcedureMetadataManager.parameterSectionRegex,
);
if (parameterSection === null || parameterSection.length !== 9) {
throw new Error(
`Could not parse stored procedure parameters from definition for stored procedure ${storedProcedureName}.`,
);
}
const parameterDefinition = parameterSection[8];
let parameterDefinitionMatch;
while (
(parameterDefinitionMatch =
StoredProcedureMetadataManager.parameterDefinitionRegex.exec(parameterDefinition)) !== null
) {
const name = parameterDefinitionMatch[1];
const type = parameterDefinitionMatch[2];
const defaultValue = parameterDefinitionMatch[3];
const parameter = parameterSchemaMap.get(name);
if (parameter !== undefined) {
parameter. | defaultValue = convertSqlValueToJsValue(defaultValue, type); |
}
}
return parameterSchemaMap.values();
}
}
| src/lib/stored-procedure/stored-procedure-metadata-manager.ts | Falven-mssql-data-source-bca6621 | [
{
"filename": "src/lib/stored-procedure/stored-procedure-manager.ts",
"retrieved_chunk": " private addParametersToRequest(\n parameters: Map<string, PreparedStoredProcedureParameter>,\n request: Request,\n ): Request {\n const preparedRequest = request;\n for (const parameter of parameters.values()) {\n const { name, type, mode, value, defaultValue } = parameter;\n if (defaultValue !== undefined && value === undefined) {\n continue;\n }",
"score": 39.96148392318941
},
{
"filename": "src/lib/stored-procedure/stored-procedure-manager.ts",
"retrieved_chunk": " // Check what required parameters are missing.\n const missingRequiredParameters = [];\n for (const parameter of parameters.values()) {\n // If they have a default value they can be ommitted from the request.\n if (parameter.defaultValue === undefined && parameter.value === undefined) {\n missingRequiredParameters.push(parameter);\n }\n }\n return missingRequiredParameters;\n }",
"score": 29.728186542258474
},
{
"filename": "src/lib/stored-procedure/stored-procedure-manager.ts",
"retrieved_chunk": " for (const spParameter of storedProcedureParameters) {\n const { name, type, length, precision, scale, ...rest } = spParameter;\n const parameterName = name.slice(1);\n // Let's use the parameter name in lowercase as the lookup key.\n preparedParameters.set(parameterName.toLowerCase(), {\n name: parameterName,\n type: mapDbTypeToDriverType({\n type,\n length,\n precision,",
"score": 24.866840397809803
},
{
"filename": "src/lib/types/i-stored-procedure-parameter.ts",
"retrieved_chunk": "import type { ParameterMode } from '.';\n/**\n * Represents a subset of used metadata for an MSSQL stored procedure parameter.\n * @property {string} name - The name of the parameter. Begins with @.\n * @property {string} type - The MSSQL data type of the parameter.\n * @property {ParameterMode} mode - The MSSQL mode of the parameter. Either 'IN', 'INOUT' or 'UNKNOWN'.\n * @property {unknown} defaultValue - The default value of the parameter, if any, or undefined.\n * @property {number} length - The length of character-based parameters, or undefined.\n * @property {number} precision - The precision of floating point parameters, or undefined.\n * @property {number} scale - The scale of floating point parameters, or undefined.",
"score": 23.55862077589879
},
{
"filename": "src/lib/stored-procedure/stored-procedure-manager.ts",
"retrieved_chunk": " const modeEnum = mode;\n if (modeEnum === ParameterMode.IN) {\n preparedRequest.input(name, type, value);\n } else if (modeEnum === ParameterMode.INOUT) {\n preparedRequest.output(name, type, value);\n } else {\n throw new Error(`Unknown parameter mode: ${mode}`);\n }\n }\n return preparedRequest;",
"score": 20.519241361763584
}
] | typescript | defaultValue = convertSqlValueToJsValue(defaultValue, type); |
import { Node } from 'ohm-js';
import { toAST } from 'ohm-js/extras';
import { CSTParsingError } from '../errors';
import grammar from '../grammar';
export enum ConcreteNodeTypes {
TextNode = 'TextNode',
LiquidDropNode = 'LiquidDropNode',
ElementOpeningTag = 'ElementOpeningTag',
ElementClosingTag = 'ElementClosingTag',
ElementSelfClosingTag = 'ElementSelfClosingTag',
AttributeDoubleQuoted = 'AttributeDoubleQuoted',
AttributeSingleQuoted = 'AttributeSingleQuoted',
AttributeUnquoted = 'AttributeUnquoted',
AttributeEmpty = 'AttributeEmpty',
}
export type ConcreteNode =
| ConcreteTextNode
| ConcreteLiquidDropNode
| ConcreteElementOpeningTagNode
| ConcreteElementClosingTagNode
| ConcreteElementSelfClosingTagNode;
export type ConcreteBasicNode<T> = {
type: T;
locStart: number;
locEnd: number;
source: string;
};
export type ConcreteTextNode = {
value: string;
} & ConcreteBasicNode<ConcreteNodeTypes.TextNode>;
export type ConcreteLiquidDropNode = {
value: string;
} & ConcreteBasicNode<ConcreteNodeTypes.LiquidDropNode>;
export type ConcreteElementOpeningTagNode = {
name: string;
attributes: ConcreteAttributeNode[];
} & ConcreteBasicNode<ConcreteNodeTypes.ElementOpeningTag>;
export type ConcreteElementClosingTagNode = {
name: string;
} & ConcreteBasicNode<ConcreteNodeTypes.ElementClosingTag>;
export type ConcreteElementSelfClosingTagNode = {
name: string;
attributes: ConcreteAttributeNode[];
} & ConcreteBasicNode<ConcreteNodeTypes.ElementSelfClosingTag>;
export type ConcreteAttributeNodeBase<T> = {
name: ConcreteTextNode;
value: ConcreteTextNode;
} & ConcreteBasicNode<T>;
export type ConcreteAttributeNode =
| ConcreteAttributeDoubleQuoted
| ConcreteAttributeSingleQuoted
| ConcreteAttributeUnquoted
| ConcreteAttributeEmpty;
export type ConcreteAttributeDoubleQuoted =
{} & ConcreteAttributeNodeBase<ConcreteNodeTypes.AttributeDoubleQuoted>;
export type ConcreteAttributeSingleQuoted =
{} & ConcreteAttributeNodeBase<ConcreteNodeTypes.AttributeSingleQuoted>;
export type ConcreteAttributeUnquoted =
{} & ConcreteAttributeNodeBase<ConcreteNodeTypes.AttributeUnquoted>;
export type ConcreteAttributeEmpty = {
name: ConcreteTextNode;
} & ConcreteBasicNode<ConcreteNodeTypes.AttributeEmpty>;
export type CST = ConcreteNode[];
export type TemplateMapping = {
type: ConcreteNodeTypes;
locStart: (node: Node[]) => number;
locEnd: (node: Node[]) => number;
source: string;
[k: string]: string | number | boolean | object | null;
};
export type TopLevelFunctionMapping = (...nodes: Node[]) => any;
export type Mapping = {
[k: string]: number | TemplateMapping | TopLevelFunctionMapping;
};
function locStart(nodes: Node[]) {
return nodes[0].source.startIdx;
}
function locEnd(nodes: Node[]) {
return nodes[nodes.length - 1].source.endIdx;
}
export default function sourceToCST(source: string): ConcreteNode[] {
const | matchResult = grammar.match(source); |
if (matchResult.failed()) {
throw new CSTParsingError(matchResult);
}
const textNode = {
type: ConcreteNodeTypes.TextNode,
locStart,
locEnd,
value: function (this: Node) {
return this.sourceString;
},
source,
};
const mapping: Mapping = {
Node: 0,
TextNode: textNode,
liquidDropNode: {
type: ConcreteNodeTypes.LiquidDropNode,
locStart,
locEnd,
source,
value: 2,
},
liquidDropValue: (node: Node) => node.sourceString.trimEnd(),
ElementNode: 0,
ElementOpeningTag: {
type: ConcreteNodeTypes.ElementOpeningTag,
locStart,
locEnd,
name: 1,
attributes: 2,
source,
},
ElementClosingTag: {
type: ConcreteNodeTypes.ElementClosingTag,
locStart,
locEnd,
name: 1,
source,
},
ElementSelfClosingTag: {
type: ConcreteNodeTypes.ElementSelfClosingTag,
locStart,
locEnd,
name: 1,
attributes: 2,
source,
},
AttributeDoubleQuoted: {
type: ConcreteNodeTypes.AttributeDoubleQuoted,
locStart,
locEnd,
source,
name: 0,
value: 3,
},
AttributeSingleQuoted: {
type: ConcreteNodeTypes.AttributeSingleQuoted,
locStart,
locEnd,
source,
name: 0,
value: 3,
},
AttributeUnquoted: {
type: ConcreteNodeTypes.AttributeUnquoted,
locStart,
locEnd,
source,
name: 0,
value: 2,
},
AttributeEmpty: {
type: ConcreteNodeTypes.AttributeEmpty,
locStart,
locEnd,
source,
name: 0,
},
attributeName: textNode,
attributeDoubleQuotedValue: 0,
attributeSingleQuotedValue: 0,
attributeUnquotedValue: 0,
attributeDoubleQuotedTextNode: textNode,
attributeSingleQuotedTextNode: textNode,
attributeUnquotedTextNode: textNode,
};
const cst = toAST(matchResult, mapping) as ConcreteNode[];
return cst;
}
| src/parser/1-source-to-cst/index.ts | unshopable-liquidx-a101873 | [
{
"filename": "src/parser/2-cst-to-ast/index.ts",
"retrieved_chunk": " }\n return astBuilder.finish();\n}\nexport default function sourceToAST(source: string): LiquidXNode[] {\n const cst = sourceToCST(source);\n const ast = cstToAST(cst);\n return ast;\n}",
"score": 26.546773980359003
},
{
"filename": "src/parser/grammar/__tests__/grammar.test.ts",
"retrieved_chunk": " expectMatchSucceeded('Image />').toBe(false);\n expectMatchSucceeded('Image attr />').toBe(false);\n});\nfunction expectMatchSucceeded(source: string) {\n const match = grammar.match(source);\n return expect(match.succeeded());\n}",
"score": 26.446950389127164
},
{
"filename": "src/renderer/index.ts",
"retrieved_chunk": " return output;\n}\nexport default function render(source: string, { withSource = false } = {}) {\n const ast = sourceToAST(source);\n const ouput = renderAST(ast, { withSource });\n return ouput;\n}",
"score": 22.21832799831301
},
{
"filename": "src/parser/2-cst-to-ast/index.ts",
"retrieved_chunk": " for (let i = 0; i < cst.length; i += 1) {\n const node = cst[i];\n const prevNode = cst[i - 1];\n // Add whitespaces and linebreaks that went missing after parsing. We don't need to do this\n // if the node is an attribute since whitespaces between attributes is not important to preserve.\n // In fact it would probably break the rendered output due to unexpected text nodes.\n // TODO: This should be handled in the grammar/source-to-cst part instead (if possible).\n if (prevNode?.source && !isAttributeNode(node)) {\n const diff = node.locStart - prevNode.locEnd;\n if (diff > 0) {",
"score": 21.620545190823137
},
{
"filename": "src/parser/1-source-to-cst/__tests__/utils.ts",
"retrieved_chunk": "import sourceToCST from '../';\nexport function expectOutput(input: string) {\n const output = sourceToCST(input);\n return expect(output);\n}",
"score": 19.428632921089914
}
] | typescript | matchResult = grammar.match(source); |
import sourceToCST, {
ConcreteAttributeNode,
ConcreteElementOpeningTagNode,
ConcreteElementSelfClosingTagNode,
ConcreteLiquidDropNode,
ConcreteNode,
ConcreteNodeTypes,
ConcreteTextNode,
} from '../1-source-to-cst';
import { UnknownConcreteNodeTypeError } from '../errors';
import ASTBuilder from './ast-builder';
export type BasicNode<T> = {
type: T;
locStart: number;
locEnd: number;
source: string;
};
export enum NodeTypes {
TextNode = 'TextNode',
LiquidDropNode = 'LiquidDropNode',
ElementNode = 'ElementNode',
AttributeDoubleQuoted = 'AttributeDoubleQuoted',
AttributeSingleQuoted = 'AttributeSingleQuoted',
AttributeUnquoted = 'AttributeUnquoted',
AttributeEmpty = 'AttributeEmpty',
}
export type TextNode = {
value: string;
} & BasicNode<NodeTypes.TextNode>;
export type LiquidDropNode = {
value: string;
} & BasicNode<NodeTypes.LiquidDropNode>;
export type LiquidXNode = TextNode | LiquidDropNode | ElementNode | AttributeNode;
export type ElementNode = {
name: string;
source: string;
attributes: AttributeNode[];
children: LiquidXNode[];
} & BasicNode<NodeTypes.ElementNode>;
export type AttributeNode =
| AttributeDoubleQuoted
| AttributeSingleQuoted
| AttributeUnquoted
| AttributeEmpty;
export type AttributeNodeBase<T> = {
name: TextNode;
value: TextNode | LiquidDropNode;
} & BasicNode<T>;
export type AttributeDoubleQuoted = {} & AttributeNodeBase<NodeTypes.AttributeDoubleQuoted>;
export type AttributeSingleQuoted = {} & AttributeNodeBase<NodeTypes.AttributeSingleQuoted>;
export type AttributeUnquoted = {} & AttributeNodeBase<NodeTypes.AttributeUnquoted>;
export type AttributeEmpty = { name: TextNode } & BasicNode<NodeTypes.AttributeEmpty>;
function toTextNode(node: ConcreteTextNode): TextNode {
return {
type: NodeTypes.TextNode,
locStart: node.locStart,
locEnd: node.locEnd,
source: node.source,
value: node.value,
};
}
/** Converts a concrete Liquid drop node into its AST counterpart. */
function toLiquidDropNode(node: ConcreteLiquidDropNode): LiquidDropNode {
  const { locStart, locEnd, source, value } = node;
  const dropNode: LiquidDropNode = {
    type: NodeTypes.LiquidDropNode,
    locStart,
    locEnd,
    source,
    value,
  };
  return dropNode;
}
/**
 * Converts a concrete opening or self-closing tag into an ElementNode.
 * Children start out empty; they are filled in later by the AST builder.
 */
function toElementNode(
  node: ConcreteElementOpeningTagNode | ConcreteElementSelfClosingTagNode,
): ElementNode {
  const { locStart, locEnd, name, source } = node;
  return {
    type: NodeTypes.ElementNode,
    locStart,
    locEnd,
    name,
    source,
    attributes: toAttributes(node.attributes),
    children: [],
  };
}
/** Converts a list of concrete attribute nodes into AST attribute nodes. */
function toAttributes(attributes: ConcreteAttributeNode[]) {
  const converted = cstToAST(attributes);
  return converted as AttributeNode[];
}
/** Converts a concrete attribute value into an AST text or drop node. */
function toAttributeValue(value: ConcreteTextNode | ConcreteLiquidDropNode) {
  const [converted] = cstToAST([value]);
  return converted as TextNode | LiquidDropNode;
}
function isAttributeNode(node: any): boolean {
return (
node.type === ConcreteNodeTypes.AttributeDoubleQuoted ||
node.type === ConcreteNodeTypes.AttributeSingleQuoted ||
node.type === ConcreteNodeTypes.AttributeUnquoted ||
node.type === ConcreteNodeTypes.AttributeEmpty
);
}
/**
 * Converts a flat CST (or a list of concrete attribute nodes) into an AST.
 *
 * Walks the CST in document order; ASTBuilder reconstructs element nesting
 * from opening/closing tag pairs.
 *
 * @throws UnknownConcreteNodeTypeError when a CST node type is not handled.
 */
function cstToAST(cst: ConcreteNode[] | ConcreteAttributeNode[]) {
  if (cst.length === 0) return [];
  // Every node carries the same source string; seed the builder with it.
  const astBuilder = new ASTBuilder(cst[0].source);
  for (let i = 0; i < cst.length; i += 1) {
    const node = cst[i];
    const prevNode = cst[i - 1];
    // Add whitespaces and linebreaks that went missing after parsing. We don't need to do this
    // if the node is an attribute since whitespaces between attributes is not important to preserve.
    // In fact it would probably break the rendered output due to unexpected text nodes.
    // TODO: This should be handled in the grammar/source-to-cst part instead (if possible).
    if (prevNode?.source && !isAttributeNode(node)) {
      const diff = node.locStart - prevNode.locEnd;
      if (diff > 0) {
        // Re-insert the skipped gap as a synthetic text node.
        astBuilder.push(
          toTextNode({
            type: ConcreteNodeTypes.TextNode,
            locStart: prevNode.locEnd,
            locEnd: node.locStart,
            source: node.source,
            value: prevNode.source.slice(prevNode.locEnd, node.locStart),
          }),
        );
      }
    }
    switch (node.type) {
      case ConcreteNodeTypes.TextNode: {
        astBuilder.push(toTextNode(node));
        break;
      }
      case ConcreteNodeTypes.LiquidDropNode: {
        astBuilder.push(toLiquidDropNode(node));
        break;
      }
      case ConcreteNodeTypes.ElementOpeningTag: {
        // Opening tag: start a nesting level; children follow until the closing tag.
        astBuilder.open(toElementNode(node));
        break;
      }
      case ConcreteNodeTypes.ElementClosingTag: {
        astBuilder.close(node, NodeTypes.ElementNode);
        break;
      }
      case ConcreteNodeTypes.ElementSelfClosingTag: {
        // Self-closing: open and immediately close, leaving no children.
        astBuilder.open(toElementNode(node));
        astBuilder.close(node, NodeTypes.ElementNode);
        break;
      }
      case ConcreteNodeTypes.AttributeDoubleQuoted:
      case ConcreteNodeTypes.AttributeSingleQuoted:
      case ConcreteNodeTypes.AttributeUnquoted: {
        // Concrete and AST attribute enums declare the same string values, hence the cast.
        const attributeNode: AttributeDoubleQuoted | AttributeSingleQuoted | AttributeUnquoted = {
          type: node.type as unknown as
            | NodeTypes.AttributeDoubleQuoted
            | NodeTypes.AttributeSingleQuoted
            | NodeTypes.AttributeUnquoted,
          locStart: node.locStart,
          locEnd: node.locEnd,
          source: node.source,
          name: cstToAST([node.name])[0] as TextNode,
          value: toAttributeValue(node.value),
        };
        astBuilder.push(attributeNode);
        break;
      }
      case ConcreteNodeTypes.AttributeEmpty: {
        const attributeNode: AttributeEmpty = {
          type: NodeTypes.AttributeEmpty,
          locStart: node.locStart,
          locEnd: node.locEnd,
          source: node.source,
          name: cstToAST([node.name])[0] as TextNode,
        };
        astBuilder.push(attributeNode);
        break;
      }
      default: {
        // NOTE(review): the message argument is empty — consider a descriptive message.
        throw new UnknownConcreteNodeTypeError(
          '',
          (node as any)?.source,
          (node as any)?.locStart,
          (node as any)?.locEnd,
        );
      }
    }
  }
  return astBuilder.finish();
}
/** Parses LiquidX source text into an AST: source -> CST -> AST. */
export default function sourceToAST(source: string): LiquidXNode[] {
  return cstToAST(sourceToCST(source));
}
| src/parser/2-cst-to-ast/index.ts | unshopable-liquidx-a101873 | [
{
"filename": "src/renderer/index.ts",
"retrieved_chunk": " case NodeTypes.ElementNode: {\n output += renderElement(node, { withSource, isChildOfElementNode });\n break;\n }\n case NodeTypes.AttributeDoubleQuoted:\n case NodeTypes.AttributeSingleQuoted:\n case NodeTypes.AttributeUnquoted: {\n const name = renderText(node.name);\n let value = null;\n if (node.value.type === NodeTypes.TextNode) {",
"score": 27.32220269281197
},
{
"filename": "src/renderer/index.ts",
"retrieved_chunk": " value = JSON.stringify(renderText(node.value));\n } else {\n value = renderLiquidDrop(node.value);\n }\n output += `${name}: ${value}`;\n break;\n }\n case NodeTypes.AttributeEmpty: {\n const name = renderText(node.name);\n const value = true;",
"score": 23.469785242069047
},
{
"filename": "src/renderer/index.ts",
"retrieved_chunk": " { withSource = false, isChildOfElementNode = false } = {},\n): string {\n let output = '';\n for (let i = 0; i < ast.length; i += 1) {\n const node = ast[i];\n switch (node.type) {\n case NodeTypes.TextNode: {\n output += renderText(node);\n break;\n }",
"score": 22.30637292538497
},
{
"filename": "src/parser/2-cst-to-ast/ast-builder.ts",
"retrieved_chunk": " node.locEnd,\n );\n }\n this.parent.locEnd = node.locEnd;\n this.cursor.pop();\n this.cursor.pop();\n }\n push(node: LiquidXNode) {\n this.current.push(node);\n }",
"score": 21.869415806151352
},
{
"filename": "src/parser/2-cst-to-ast/ast-builder.ts",
"retrieved_chunk": " return (this.current || []).length - 1;\n }\n get parent(): ElementNode | undefined {\n if (this.cursor.length == 0) return undefined;\n return deepGet<ElementNode>(dropLast(1, this.cursor), this.ast);\n }\n open(node: ElementNode) {\n this.push(node);\n this.cursor.push(this.currentPosition);\n this.cursor.push('children');",
"score": 21.24790589347748
}
] | typescript | open(toElementNode(node)); |
import { Node } from 'ohm-js';
import { toAST } from 'ohm-js/extras';
import { CSTParsingError } from '../errors';
import grammar from '../grammar';
/** Discriminant values for the `type` field of CST nodes. */
export enum ConcreteNodeTypes {
  TextNode = 'TextNode',
  LiquidDropNode = 'LiquidDropNode',
  ElementOpeningTag = 'ElementOpeningTag',
  ElementClosingTag = 'ElementClosingTag',
  ElementSelfClosingTag = 'ElementSelfClosingTag',
  AttributeDoubleQuoted = 'AttributeDoubleQuoted',
  AttributeSingleQuoted = 'AttributeSingleQuoted',
  AttributeUnquoted = 'AttributeUnquoted',
  AttributeEmpty = 'AttributeEmpty',
}
/** Union of the top-level CST node kinds (attributes live inside tag nodes). */
export type ConcreteNode =
  | ConcreteTextNode
  | ConcreteLiquidDropNode
  | ConcreteElementOpeningTagNode
  | ConcreteElementClosingTagNode
  | ConcreteElementSelfClosingTagNode;
/** Location and raw-source bookkeeping shared by every CST node. */
export type ConcreteBasicNode<T> = {
  type: T;
  locStart: number;
  locEnd: number;
  source: string;
};
export type ConcreteTextNode = {
  value: string;
} & ConcreteBasicNode<ConcreteNodeTypes.TextNode>;
export type ConcreteLiquidDropNode = {
  value: string;
} & ConcreteBasicNode<ConcreteNodeTypes.LiquidDropNode>;
export type ConcreteElementOpeningTagNode = {
  name: string;
  attributes: ConcreteAttributeNode[];
} & ConcreteBasicNode<ConcreteNodeTypes.ElementOpeningTag>;
export type ConcreteElementClosingTagNode = {
  name: string;
} & ConcreteBasicNode<ConcreteNodeTypes.ElementClosingTag>;
export type ConcreteElementSelfClosingTagNode = {
  name: string;
  attributes: ConcreteAttributeNode[];
} & ConcreteBasicNode<ConcreteNodeTypes.ElementSelfClosingTag>;
/** Name/value pair shared by all valued attribute flavors. */
export type ConcreteAttributeNodeBase<T> = {
  name: ConcreteTextNode;
  value: ConcreteTextNode;
} & ConcreteBasicNode<T>;
export type ConcreteAttributeNode =
  | ConcreteAttributeDoubleQuoted
  | ConcreteAttributeSingleQuoted
  | ConcreteAttributeUnquoted
  | ConcreteAttributeEmpty;
export type ConcreteAttributeDoubleQuoted =
  {} & ConcreteAttributeNodeBase<ConcreteNodeTypes.AttributeDoubleQuoted>;
export type ConcreteAttributeSingleQuoted =
  {} & ConcreteAttributeNodeBase<ConcreteNodeTypes.AttributeSingleQuoted>;
export type ConcreteAttributeUnquoted =
  {} & ConcreteAttributeNodeBase<ConcreteNodeTypes.AttributeUnquoted>;
/** An attribute that has a name but no value. */
export type ConcreteAttributeEmpty = {
  name: ConcreteTextNode;
} & ConcreteBasicNode<ConcreteNodeTypes.AttributeEmpty>;
export type CST = ConcreteNode[];
/** Template describing how to build one CST node kind from Ohm match children. */
export type TemplateMapping = {
  type: ConcreteNodeTypes;
  locStart: (node: Node[]) => number;
  locEnd: (node: Node[]) => number;
  source: string;
  [k: string]: string | number | boolean | object | null;
};
export type TopLevelFunctionMapping = (...nodes: Node[]) => any;
/** Grammar-rule name -> child index, node template, or mapping function. */
export type Mapping = {
  [k: string]: number | TemplateMapping | TopLevelFunctionMapping;
};
/** Start offset (in the original source) of the first node's match. */
function locStart(nodes: Node[]) {
  const [first] = nodes;
  return first.source.startIdx;
}
/** End offset (in the original source) of the last node's match. */
function locEnd(nodes: Node[]) {
  const last = nodes[nodes.length - 1];
  return last.source.endIdx;
}
/**
 * Parses LiquidX source into a concrete syntax tree using the Ohm grammar.
 *
 * @param source - The raw LiquidX source text.
 * @returns The CST nodes in document order.
 * @throws CSTParsingError when the source does not match the grammar.
 */
export default function sourceToCST(source: string): ConcreteNode[] {
  const matchResult = grammar.match(source);
  if (matchResult.failed()) {
    throw new CSTParsingError(matchResult);
  }
  // Shared template for plain-text nodes; `value` captures the matched text.
  const textNode = {
    type: ConcreteNodeTypes.TextNode,
    locStart,
    locEnd,
    value: function (this: Node) {
      return this.sourceString;
    },
    source,
  };
  // Maps grammar rule names to node templates. Numeric values forward the
  // result of the child at that index; function values compute the value.
  const mapping: Mapping = {
    Node: 0,
    TextNode: textNode,
    liquidDropNode: {
      type: ConcreteNodeTypes.LiquidDropNode,
      locStart,
      locEnd,
      source,
      value: 2,
    },
    liquidDropValue: (node: Node) => node.sourceString.trimEnd(),
    ElementNode: 0,
    ElementOpeningTag: {
      type: ConcreteNodeTypes.ElementOpeningTag,
      locStart,
      locEnd,
      name: 1,
      attributes: 2,
      source,
    },
    ElementClosingTag: {
      type: ConcreteNodeTypes.ElementClosingTag,
      locStart,
      locEnd,
      name: 1,
      source,
    },
    ElementSelfClosingTag: {
      type: ConcreteNodeTypes.ElementSelfClosingTag,
      locStart,
      locEnd,
      name: 1,
      attributes: 2,
      source,
    },
    AttributeDoubleQuoted: {
      type: ConcreteNodeTypes.AttributeDoubleQuoted,
      locStart,
      locEnd,
      source,
      name: 0,
      value: 3,
    },
    AttributeSingleQuoted: {
      type: ConcreteNodeTypes.AttributeSingleQuoted,
      locStart,
      locEnd,
      source,
      name: 0,
      value: 3,
    },
    AttributeUnquoted: {
      type: ConcreteNodeTypes.AttributeUnquoted,
      locStart,
      locEnd,
      source,
      name: 0,
      value: 2,
    },
    AttributeEmpty: {
      type: ConcreteNodeTypes.AttributeEmpty,
      locStart,
      locEnd,
      source,
      name: 0,
    },
    attributeName: textNode,
    attributeDoubleQuotedValue: 0,
    attributeSingleQuotedValue: 0,
    attributeUnquotedValue: 0,
    attributeDoubleQuotedTextNode: textNode,
    attributeSingleQuotedTextNode: textNode,
    attributeUnquotedTextNode: textNode,
  };
  const cst = toAST(matchResult, mapping) as ConcreteNode[];
  return cst;
}
| src/parser/1-source-to-cst/index.ts | unshopable-liquidx-a101873 | [
{
"filename": "src/parser/errors.ts",
"retrieved_chunk": "}\nexport class CSTParsingError extends LoggableError {\n constructor(matchResult: ohm.MatchResult) {\n super({ result: matchResult.message ?? '' });\n this.name = 'CSTParsingError';\n }\n}\nexport class UnknownConcreteNodeTypeError extends LoggableError {\n constructor(message: string, source: string, locStart: number, locEnd: number) {\n super({ result: undefined, message, source, locStart, locEnd });",
"score": 33.65754547339022
},
{
"filename": "src/parser/2-cst-to-ast/index.ts",
"retrieved_chunk": " }\n return astBuilder.finish();\n}\nexport default function sourceToAST(source: string): LiquidXNode[] {\n const cst = sourceToCST(source);\n const ast = cstToAST(cst);\n return ast;\n}",
"score": 24.772271932615567
},
{
"filename": "src/parser/grammar/__tests__/grammar.test.ts",
"retrieved_chunk": " expectMatchSucceeded('Image />').toBe(false);\n expectMatchSucceeded('Image attr />').toBe(false);\n});\nfunction expectMatchSucceeded(source: string) {\n const match = grammar.match(source);\n return expect(match.succeeded());\n}",
"score": 24.732435760229865
},
{
"filename": "src/parser/2-cst-to-ast/ast-builder.ts",
"retrieved_chunk": " finish() {\n if (this.cursor.length > 0) {\n throw new ASTParsingError(\n `LiquidX element '${this.parent?.name}' has no corresponding closing tag.`,\n this.source,\n this.parent?.locStart ?? this.source.length - 1,\n this.parent?.locEnd ?? this.source.length,\n );\n }\n return this.ast;",
"score": 21.51213975254423
},
{
"filename": "src/parser/2-cst-to-ast/index.ts",
"retrieved_chunk": " return (\n node.type === ConcreteNodeTypes.AttributeDoubleQuoted ||\n node.type === ConcreteNodeTypes.AttributeSingleQuoted ||\n node.type === ConcreteNodeTypes.AttributeUnquoted ||\n node.type === ConcreteNodeTypes.AttributeEmpty\n );\n}\nfunction cstToAST(cst: ConcreteNode[] | ConcreteAttributeNode[]) {\n if (cst.length === 0) return [];\n const astBuilder = new ASTBuilder(cst[0].source);",
"score": 21.394112518371045
}
] | typescript | new CSTParsingError(matchResult); |
import sourceToCST, {
ConcreteAttributeNode,
ConcreteElementOpeningTagNode,
ConcreteElementSelfClosingTagNode,
ConcreteLiquidDropNode,
ConcreteNode,
ConcreteNodeTypes,
ConcreteTextNode,
} from '../1-source-to-cst';
import { UnknownConcreteNodeTypeError } from '../errors';
import ASTBuilder from './ast-builder';
/** Location and raw-source bookkeeping shared by every AST node. */
export type BasicNode<T> = {
  type: T;
  locStart: number;
  locEnd: number;
  source: string;
};
/** Discriminant values for the `type` field of AST nodes. */
export enum NodeTypes {
  TextNode = 'TextNode',
  LiquidDropNode = 'LiquidDropNode',
  ElementNode = 'ElementNode',
  AttributeDoubleQuoted = 'AttributeDoubleQuoted',
  AttributeSingleQuoted = 'AttributeSingleQuoted',
  AttributeUnquoted = 'AttributeUnquoted',
  AttributeEmpty = 'AttributeEmpty',
}
/** Plain text appearing between elements and drops. */
export type TextNode = {
  value: string;
} & BasicNode<NodeTypes.TextNode>;
/** A Liquid drop (dynamic output) expression. */
export type LiquidDropNode = {
  value: string;
} & BasicNode<NodeTypes.LiquidDropNode>;
/** Union of every node kind this AST stage can produce. */
export type LiquidXNode = TextNode | LiquidDropNode | ElementNode | AttributeNode;
/** An element together with its attributes and nested children. */
export type ElementNode = {
  name: string;
  source: string;
  attributes: AttributeNode[];
  children: LiquidXNode[];
} & BasicNode<NodeTypes.ElementNode>;
/** The four supported attribute flavors. */
export type AttributeNode =
  | AttributeDoubleQuoted
  | AttributeSingleQuoted
  | AttributeUnquoted
  | AttributeEmpty;
/** Name/value pair shared by all valued attribute flavors. */
export type AttributeNodeBase<T> = {
  name: TextNode;
  value: TextNode | LiquidDropNode;
} & BasicNode<T>;
export type AttributeDoubleQuoted = {} & AttributeNodeBase<NodeTypes.AttributeDoubleQuoted>;
export type AttributeSingleQuoted = {} & AttributeNodeBase<NodeTypes.AttributeSingleQuoted>;
export type AttributeUnquoted = {} & AttributeNodeBase<NodeTypes.AttributeUnquoted>;
/** An attribute that has a name but no value. */
export type AttributeEmpty = { name: TextNode } & BasicNode<NodeTypes.AttributeEmpty>;
/** Converts a concrete text node into its AST counterpart. */
function toTextNode(node: ConcreteTextNode): TextNode {
  const { locStart, locEnd, source, value } = node;
  const textNode: TextNode = {
    type: NodeTypes.TextNode,
    locStart,
    locEnd,
    source,
    value,
  };
  return textNode;
}
/** Converts a concrete Liquid drop node into its AST counterpart. */
function toLiquidDropNode(node: ConcreteLiquidDropNode): LiquidDropNode {
  const { locStart, locEnd, source, value } = node;
  const dropNode: LiquidDropNode = {
    type: NodeTypes.LiquidDropNode,
    locStart,
    locEnd,
    source,
    value,
  };
  return dropNode;
}
/**
 * Converts a concrete opening or self-closing tag into an ElementNode.
 * Children start out empty; they are filled in later by the AST builder.
 */
function toElementNode(
  node: ConcreteElementOpeningTagNode | ConcreteElementSelfClosingTagNode,
): ElementNode {
  const { locStart, locEnd, name, source } = node;
  return {
    type: NodeTypes.ElementNode,
    locStart,
    locEnd,
    name,
    source,
    attributes: toAttributes(node.attributes),
    children: [],
  };
}
/** Converts a list of concrete attribute nodes into AST attribute nodes. */
function toAttributes(attributes: ConcreteAttributeNode[]) {
  const converted = cstToAST(attributes);
  return converted as AttributeNode[];
}
/** Converts a concrete attribute value into an AST text or drop node. */
function toAttributeValue(value: ConcreteTextNode | ConcreteLiquidDropNode) {
  const [converted] = cstToAST([value]);
  return converted as TextNode | LiquidDropNode;
}
function isAttributeNode(node: any): boolean {
return (
node.type === ConcreteNodeTypes.AttributeDoubleQuoted ||
node.type === ConcreteNodeTypes.AttributeSingleQuoted ||
node.type === ConcreteNodeTypes.AttributeUnquoted ||
node.type === ConcreteNodeTypes.AttributeEmpty
);
}
/**
 * Converts a flat CST (or a list of concrete attribute nodes) into an AST.
 *
 * Walks the CST in document order; ASTBuilder reconstructs element nesting
 * from opening/closing tag pairs.
 *
 * @throws UnknownConcreteNodeTypeError when a CST node type is not handled.
 */
function cstToAST(cst: ConcreteNode[] | ConcreteAttributeNode[]) {
  if (cst.length === 0) return [];
  // Every node carries the same source string; seed the builder with it.
  const astBuilder = new ASTBuilder(cst[0].source);
  for (let i = 0; i < cst.length; i += 1) {
    const node = cst[i];
    const prevNode = cst[i - 1];
    // Add whitespaces and linebreaks that went missing after parsing. We don't need to do this
    // if the node is an attribute since whitespaces between attributes is not important to preserve.
    // In fact it would probably break the rendered output due to unexpected text nodes.
    // TODO: This should be handled in the grammar/source-to-cst part instead (if possible).
    if (prevNode?.source && !isAttributeNode(node)) {
      const diff = node.locStart - prevNode.locEnd;
      if (diff > 0) {
        // Re-insert the skipped gap as a synthetic text node.
        astBuilder.push(
          toTextNode({
            type: ConcreteNodeTypes.TextNode,
            locStart: prevNode.locEnd,
            locEnd: node.locStart,
            source: node.source,
            value: prevNode.source.slice(prevNode.locEnd, node.locStart),
          }),
        );
      }
    }
    switch (node.type) {
      case ConcreteNodeTypes.TextNode: {
        astBuilder.push(toTextNode(node));
        break;
      }
      case ConcreteNodeTypes.LiquidDropNode: {
        astBuilder.push(toLiquidDropNode(node));
        break;
      }
      case ConcreteNodeTypes.ElementOpeningTag: {
        // Opening tag: start a nesting level; children follow until the closing tag.
        astBuilder.open(toElementNode(node));
        break;
      }
      case ConcreteNodeTypes.ElementClosingTag: {
        astBuilder.close(node, NodeTypes.ElementNode);
        break;
      }
      case ConcreteNodeTypes.ElementSelfClosingTag: {
        // Self-closing: open and immediately close, leaving no children.
        astBuilder.open(toElementNode(node));
        astBuilder.close(node, NodeTypes.ElementNode);
        break;
      }
      case ConcreteNodeTypes.AttributeDoubleQuoted:
      case ConcreteNodeTypes.AttributeSingleQuoted:
      case ConcreteNodeTypes.AttributeUnquoted: {
        // Concrete and AST attribute enums declare the same string values, hence the cast.
        const attributeNode: AttributeDoubleQuoted | AttributeSingleQuoted | AttributeUnquoted = {
          type: node.type as unknown as
            | NodeTypes.AttributeDoubleQuoted
            | NodeTypes.AttributeSingleQuoted
            | NodeTypes.AttributeUnquoted,
          locStart: node.locStart,
          locEnd: node.locEnd,
          source: node.source,
          name: cstToAST([node.name])[0] as TextNode,
          value: toAttributeValue(node.value),
        };
        astBuilder.push(attributeNode);
        break;
      }
      case ConcreteNodeTypes.AttributeEmpty: {
        const attributeNode: AttributeEmpty = {
          type: NodeTypes.AttributeEmpty,
          locStart: node.locStart,
          locEnd: node.locEnd,
          source: node.source,
          name: cstToAST([node.name])[0] as TextNode,
        };
        astBuilder.push(attributeNode);
        break;
      }
      default: {
        // NOTE(review): the message argument is empty — consider a descriptive message.
        throw new UnknownConcreteNodeTypeError(
          '',
          (node as any)?.source,
          (node as any)?.locStart,
          (node as any)?.locEnd,
        );
      }
    }
  }
  return astBuilder.finish();
}
/** Parses LiquidX source text into an AST: source -> CST -> AST. */
export default function sourceToAST(source: string): LiquidXNode[] {
  return cstToAST(sourceToCST(source));
}
| src/parser/2-cst-to-ast/index.ts | unshopable-liquidx-a101873 | [
{
"filename": "src/parser/2-cst-to-ast/ast-builder.ts",
"retrieved_chunk": " }\n close(\n node: ConcreteElementClosingTagNode | ConcreteElementSelfClosingTagNode,\n nodeType: NodeTypes.ElementNode,\n ) {\n if (!this.parent || this.parent.name !== node.name || this.parent.type !== nodeType) {\n throw new ASTParsingError(\n `LiquidX element '${node.name}' has no corresponding opening tag`,\n this.source,\n node.locStart,",
"score": 17.664645170521545
},
{
"filename": "src/renderer/index.ts",
"retrieved_chunk": "}\nfunction renderEndMarker(node: ElementNode) {\n return `{% # LIQUIDX:END - SOURCE ${JSON.stringify(\n node.source.slice(node.locStart, node.locEnd),\n )} %}`;\n}\nfunction renderElement(\n node: ElementNode,\n { withSource = false, isChildOfElementNode = false } = {},\n) {",
"score": 17.048392409960854
},
{
"filename": "src/parser/2-cst-to-ast/utils.ts",
"retrieved_chunk": "export function deepGet<T = any>(path: (string | number)[], obj: any): T {\n return path.reduce((curr: any, k: string | number) => {\n if (curr && curr[k] !== undefined) return curr[k];\n return undefined;\n }, obj);\n}\nexport function dropLast<T>(num: number, xs: readonly T[]) {\n const result = [...xs];\n for (let i = 0; i < num; i += 1) {\n result.pop();",
"score": 15.988857469789696
},
{
"filename": "src/renderer/index.ts",
"retrieved_chunk": " output += `${name}: ${value}`;\n break;\n }\n default: {\n console.log(node);\n // TODO\n throw new Error('');\n }\n }\n }",
"score": 15.697693249918927
},
{
"filename": "src/parser/1-source-to-cst/index.ts",
"retrieved_chunk": "export type TopLevelFunctionMapping = (...nodes: Node[]) => any;\nexport type Mapping = {\n [k: string]: number | TemplateMapping | TopLevelFunctionMapping;\n};\nfunction locStart(nodes: Node[]) {\n return nodes[0].source.startIdx;\n}\nfunction locEnd(nodes: Node[]) {\n return nodes[nodes.length - 1].source.endIdx;\n}",
"score": 15.417657052787996
}
] | typescript | return astBuilder.finish(); |
import { type IResult, type Request } from 'mssql';
import type { StoredProcedureParameter, StoredProcedureSchema, ILogger } from '../types';
import { type DatabaseExecutor } from '../executor';
import { convertSqlValueToJsValue } from '../utils';
/**
* A manager for stored procedure metadata.
* Handles the retrieval and caching of stored procedure metadata.
*/
export class StoredProcedureMetadataManager {
  /**
   * Regular expression to extract MSSQL stored procedure names.
   * See https://regex101.com/r/cMsTyT/1 for this regex.
   */
  private static readonly storedProcedureNameRegex =
    /((?:(?:\[([\w\s]+)\]|(\w+))\.)?(?:\[([\w\s]+)\]|(\w+))\.(?:\[([\w\s]+)\]|(\w+)))/i;

  /**
   * Matches any comments from the Stored Procedure definition.
   * See https://regex101.com/r/dxA7n0/1 for this regex.
   */
  private static readonly commentRegex = /(?:\s*-{2}.+\s*$)|(?:\/\*([\s\S]*?)\*\/)/gm;

  /**
   * Matches the parameters from the Stored Procedure definition.
   * See https://regex101.com/r/4TaTky/1 for this regex.
   * One full match plus 8 capture groups; group 8 is the raw parameter list
   * between the procedure name and AS/FOR REPLICATION.
   */
  private static readonly parameterSectionRegex =
    /(?<=(?:CREATE|ALTER)\s+PROCEDURE)\s+((?:(?:\[([\w\s]+)\]|(\w+))\.)?(?:\[([\w\s]+)\]|(\w+))\.(?:\[([\w\s]+)\]|(\w+)))(.*?)(?=(?:AS|FOR\s+REPLICATION)[^\w])/is;

  /**
   * See https://regex101.com/r/iMEaLb/1 for this regex.
   * Match the individual parameters in the Parameter Definition.
   */
  private static readonly parameterDefinitionRegex = /(@[\w]+)\s+([^\s]+)\s*=\s*([^, ]*),?/gi;

  constructor(private readonly _databaseExecutor: DatabaseExecutor) {}

  /**
   * Retrieves the parameter schema and definition text for a stored procedure.
   * Issues one batch with two statements: the INFORMATION_SCHEMA.PARAMETERS
   * rows for the procedure, and OBJECT_DEFINITION of the procedure.
   * @param {string} storedProcedureName - The name of the stored procedure to retrieve the parameter schema for.
   * @param {ILogger} logger - The logger used by the database executor.
   * @returns A Promise resolving to the two result sets described above.
   * @throws If either result set is missing or malformed.
   */
  public async getStoredProcedureParameterSchema(
    storedProcedureName: string,
    logger: ILogger,
  ): Promise<IResult<StoredProcedureSchema>> {
    return await this._databaseExecutor.executeQueryRequest(async (request: Request) => {
      // Remove square bracket notation if any, and split into schema and name.
      const schemaAndName = storedProcedureName.replace(/\[|\]/g, '').split('.');
      // NOTE(review): the schema/name are interpolated directly into the SQL
      // text. If storedProcedureName can ever come from untrusted input this
      // is injectable; consider parameterizing via request.input().
      const result = await request.query<StoredProcedureSchema>(
        'SELECT ' +
          'PARAMETER_NAME as name, ' +
          'DATA_TYPE as type, ' +
          'PARAMETER_MODE as mode, ' +
          'CHARACTER_MAXIMUM_LENGTH length, ' +
          'NUMERIC_PRECISION as precision, ' +
          'NUMERIC_SCALE as scale ' +
          'FROM INFORMATION_SCHEMA.PARAMETERS ' +
          `WHERE SPECIFIC_SCHEMA = '${schemaAndName[0]}' AND SPECIFIC_NAME = '${schemaAndName[1]}';
        SELECT OBJECT_DEFINITION(OBJECT_ID('${storedProcedureName}')) AS storedProcedureDefinition;`,
      );

      const recordSetLength = result.recordsets.length as number;
      // Expect exactly two result sets: parameter rows and the definition row.
      if (recordSetLength < 1 || recordSetLength > 2) {
        throw new Error(
          `Could not retrieve stored procedure parameter schema from Database for stored procedure ${storedProcedureName}.`,
        );
      }

      // The second result set must contain exactly one definition row.
      if (recordSetLength !== 2 || result.recordsets[1].length !== 1) {
        throw new Error(
          `Could not retrieve stored procedure definition from Database for stored procedure ${storedProcedureName}.`,
        );
      }
      return result;
    }, logger);
  }

  /**
   * Parses the stored procedure parameter schema into a StoredProcedureParameter array.
   * Combines the INFORMATION_SCHEMA rows with default values scraped from the
   * procedure's definition text.
   * @param {string} storedProcedureName - The name of the stored procedure to parse the parameter schema for.
   * @param {IResult<StoredProcedureSchema>} schemaResult - The result of the stored procedure parameter schema query.
   * @returns A StoredProcedureParameter array.
   */
  public parseStoredProcedureParameters(
    storedProcedureName: string,
    schemaResult: IResult<StoredProcedureSchema>,
  ): IterableIterator<StoredProcedureParameter> {
    // Index the INFORMATION_SCHEMA rows by parameter name for quick lookup.
    const parameterSchemaMap: Map<string, StoredProcedureParameter> =
      schemaResult.recordsets[0].reduce(
        (parameterMap: Map<string, StoredProcedureParameter>, item: StoredProcedureParameter) => {
          parameterMap.set(item.name, item);
          return parameterMap;
        },
        new Map<string, StoredProcedureParameter>(),
      );

    const storedProcedureDefinition = schemaResult.recordsets[1][0].storedProcedureDefinition;
    if (storedProcedureDefinition == null) {
      throw new Error(
        `Could not parse stored procedure definition for stored procedure ${storedProcedureName}.`,
      );
    }

    // Strip comments so commented-out parameters are not matched below.
    const commentStrippedStoredProcedureDefinition = storedProcedureDefinition.replace(
      StoredProcedureMetadataManager.commentRegex,
      '',
    );
    if (commentStrippedStoredProcedureDefinition === '') {
      throw new Error(
        `Could not parse stored procedure comments from definition for stored procedure ${storedProcedureName}.`,
      );
    }

    // length === 9: the full match plus the regex's 8 capture groups.
    const parameterSection = commentStrippedStoredProcedureDefinition.match(
      StoredProcedureMetadataManager.parameterSectionRegex,
    );
    if (parameterSection === null || parameterSection.length !== 9) {
      throw new Error(
        `Could not parse stored procedure parameters from definition for stored procedure ${storedProcedureName}.`,
      );
    }
    const parameterDefinition = parameterSection[8];

    // Attach parsed default values to the matching schema entries.
    let parameterDefinitionMatch;
    while (
      (parameterDefinitionMatch =
        StoredProcedureMetadataManager.parameterDefinitionRegex.exec(parameterDefinition)) !== null
    ) {
      const name = parameterDefinitionMatch[1];
      const type = parameterDefinitionMatch[2];
      const defaultValue = parameterDefinitionMatch[3];
      const parameter = parameterSchemaMap.get(name);
      if (parameter !== undefined) {
        parameter.defaultValue = convertSqlValueToJsValue(defaultValue, type);
      }
    }
    return parameterSchemaMap.values();
  }
}
| src/lib/stored-procedure/stored-procedure-metadata-manager.ts | Falven-mssql-data-source-bca6621 | [
{
"filename": "src/lib/types/stored-procedure-schema.ts",
"retrieved_chunk": "import type { StoredProcedureParameter } from '.';\n/**\n * Represents the result of a stored procedure execution.\n */\nexport type StoredProcedureSchema = [\n StoredProcedureParameter,\n {\n storedProcedureDefinition: string;\n },\n];",
"score": 29.51664433894729
},
{
"filename": "src/lib/stored-procedure/stored-procedure-manager.ts",
"retrieved_chunk": " const preparedResult = this.prepareStoredProcedureResult(result, info);\n logPerformance(logger, 'prepareStoredProcedureResult', startTime);\n return preparedResult;\n }\n private prepareParameters(\n storedProcedureParameters: IterableIterator<StoredProcedureParameter>,\n input: InputParameters,\n ): Map<string, PreparedStoredProcedureParameter> {\n // We want to use the inferred DB Stored Procedure schema as the source of truth.\n const preparedParameters = new Map<string, PreparedStoredProcedureParameter>();",
"score": 28.298484042590193
},
{
"filename": "src/lib/stored-procedure/stored-procedure-manager.ts",
"retrieved_chunk": " }\n /**\n * Prepares the stored procedure request.\n * @param {IterableIterator<StoredProcedureParameter>} storedProcedureParameters - The stored procedure parameters.\n * @param {StoredProcedureInput} input - The input object.\n * @param {Request} request - The request object.\n * @returns A prepared request object.\n */\n private prepareStoredProcedureRequest(\n storedProcedureParameters: IterableIterator<StoredProcedureParameter>,",
"score": 27.861597281964098
},
{
"filename": "src/lib/types/i-stored-procedure-parameter.ts",
"retrieved_chunk": " */\nexport interface StoredProcedureParameter {\n name: string;\n type: string;\n mode: ParameterMode;\n defaultValue?: unknown;\n length?: number;\n precision?: number;\n scale?: number;\n}",
"score": 23.301350763430953
},
{
"filename": "src/lib/stored-procedure/stored-procedure-manager.ts",
"retrieved_chunk": "import { camelCase } from 'lodash';\nimport { type Request, type IProcedureResult, type IResult, type IRecordSet } from 'mssql';\nimport { type GraphQLResolveInfo } from 'graphql';\nimport {\n type DriverType,\n type PreparedStoredProcedureParameter,\n ParameterMode,\n type StoredProcedureSchema,\n type StoredProcedureParameter,\n type ILogger,",
"score": 21.266393341743957
}
] | typescript | parameterMap.set(item.name, item); |
import sourceToCST, {
ConcreteAttributeNode,
ConcreteElementOpeningTagNode,
ConcreteElementSelfClosingTagNode,
ConcreteLiquidDropNode,
ConcreteNode,
ConcreteNodeTypes,
ConcreteTextNode,
} from '../1-source-to-cst';
import { UnknownConcreteNodeTypeError } from '../errors';
import ASTBuilder from './ast-builder';
/** Location and raw-source bookkeeping shared by every AST node. */
export type BasicNode<T> = {
  type: T;
  locStart: number;
  locEnd: number;
  source: string;
};
/** Discriminant values for the `type` field of AST nodes. */
export enum NodeTypes {
  TextNode = 'TextNode',
  LiquidDropNode = 'LiquidDropNode',
  ElementNode = 'ElementNode',
  AttributeDoubleQuoted = 'AttributeDoubleQuoted',
  AttributeSingleQuoted = 'AttributeSingleQuoted',
  AttributeUnquoted = 'AttributeUnquoted',
  AttributeEmpty = 'AttributeEmpty',
}
/** Plain text appearing between elements and drops. */
export type TextNode = {
  value: string;
} & BasicNode<NodeTypes.TextNode>;
/** A Liquid drop (dynamic output) expression. */
export type LiquidDropNode = {
  value: string;
} & BasicNode<NodeTypes.LiquidDropNode>;
/** Union of every node kind this AST stage can produce. */
export type LiquidXNode = TextNode | LiquidDropNode | ElementNode | AttributeNode;
/** An element together with its attributes and nested children. */
export type ElementNode = {
  name: string;
  source: string;
  attributes: AttributeNode[];
  children: LiquidXNode[];
} & BasicNode<NodeTypes.ElementNode>;
/** The four supported attribute flavors. */
export type AttributeNode =
  | AttributeDoubleQuoted
  | AttributeSingleQuoted
  | AttributeUnquoted
  | AttributeEmpty;
/** Name/value pair shared by all valued attribute flavors. */
export type AttributeNodeBase<T> = {
  name: TextNode;
  value: TextNode | LiquidDropNode;
} & BasicNode<T>;
export type AttributeDoubleQuoted = {} & AttributeNodeBase<NodeTypes.AttributeDoubleQuoted>;
export type AttributeSingleQuoted = {} & AttributeNodeBase<NodeTypes.AttributeSingleQuoted>;
export type AttributeUnquoted = {} & AttributeNodeBase<NodeTypes.AttributeUnquoted>;
/** An attribute that has a name but no value. */
export type AttributeEmpty = { name: TextNode } & BasicNode<NodeTypes.AttributeEmpty>;
/** Converts a concrete text node into its AST counterpart. */
function toTextNode(node: ConcreteTextNode): TextNode {
  const { locStart, locEnd, source, value } = node;
  const textNode: TextNode = {
    type: NodeTypes.TextNode,
    locStart,
    locEnd,
    source,
    value,
  };
  return textNode;
}
/** Converts a concrete Liquid drop node into its AST counterpart. */
function toLiquidDropNode(node: ConcreteLiquidDropNode): LiquidDropNode {
  const { locStart, locEnd, source, value } = node;
  const dropNode: LiquidDropNode = {
    type: NodeTypes.LiquidDropNode,
    locStart,
    locEnd,
    source,
    value,
  };
  return dropNode;
}
/**
 * Converts a concrete opening or self-closing tag into an ElementNode.
 * Children start out empty; they are filled in later by the AST builder.
 */
function toElementNode(
  node: ConcreteElementOpeningTagNode | ConcreteElementSelfClosingTagNode,
): ElementNode {
  const { locStart, locEnd, name, source } = node;
  return {
    type: NodeTypes.ElementNode,
    locStart,
    locEnd,
    name,
    source,
    attributes: toAttributes(node.attributes),
    children: [],
  };
}
/** Converts a list of concrete attribute nodes into AST attribute nodes. */
function toAttributes(attributes: ConcreteAttributeNode[]) {
  const converted = cstToAST(attributes);
  return converted as AttributeNode[];
}
/** Converts a concrete attribute value into an AST text or drop node. */
function toAttributeValue(value: ConcreteTextNode | ConcreteLiquidDropNode) {
  const [converted] = cstToAST([value]);
  return converted as TextNode | LiquidDropNode;
}
function isAttributeNode(node: any): boolean {
return (
node.type === ConcreteNodeTypes.AttributeDoubleQuoted ||
node.type === ConcreteNodeTypes.AttributeSingleQuoted ||
node.type === ConcreteNodeTypes.AttributeUnquoted ||
node.type === ConcreteNodeTypes.AttributeEmpty
);
}
/**
 * Converts a flat CST (or a list of concrete attribute nodes) into an AST.
 *
 * Walks the CST in document order; ASTBuilder reconstructs element nesting
 * from opening/closing tag pairs.
 *
 * @throws UnknownConcreteNodeTypeError when a CST node type is not handled.
 */
function cstToAST(cst: ConcreteNode[] | ConcreteAttributeNode[]) {
  if (cst.length === 0) return [];
  // Every node carries the same source string; seed the builder with it.
  const astBuilder = new ASTBuilder(cst[0].source);
  for (let i = 0; i < cst.length; i += 1) {
    const node = cst[i];
    const prevNode = cst[i - 1];
    // Add whitespaces and linebreaks that went missing after parsing. We don't need to do this
    // if the node is an attribute since whitespaces between attributes is not important to preserve.
    // In fact it would probably break the rendered output due to unexpected text nodes.
    // TODO: This should be handled in the grammar/source-to-cst part instead (if possible).
    if (prevNode?.source && !isAttributeNode(node)) {
      const diff = node.locStart - prevNode.locEnd;
      if (diff > 0) {
        // Re-insert the skipped gap as a synthetic text node.
        astBuilder.push(
          toTextNode({
            type: ConcreteNodeTypes.TextNode,
            locStart: prevNode.locEnd,
            locEnd: node.locStart,
            source: node.source,
            value: prevNode.source.slice(prevNode.locEnd, node.locStart),
          }),
        );
      }
    }
    switch (node.type) {
      case ConcreteNodeTypes.TextNode: {
        astBuilder.push(toTextNode(node));
        break;
      }
      case ConcreteNodeTypes.LiquidDropNode: {
        astBuilder.push(toLiquidDropNode(node));
        break;
      }
      case ConcreteNodeTypes.ElementOpeningTag: {
        // Opening tag: start a nesting level; children follow until the closing tag.
        astBuilder.open(toElementNode(node));
        break;
      }
      case ConcreteNodeTypes.ElementClosingTag: {
        astBuilder.close(node, NodeTypes.ElementNode);
        break;
      }
      case ConcreteNodeTypes.ElementSelfClosingTag: {
        // Self-closing: open and immediately close, leaving no children.
        astBuilder.open(toElementNode(node));
        astBuilder.close(node, NodeTypes.ElementNode);
        break;
      }
      case ConcreteNodeTypes.AttributeDoubleQuoted:
      case ConcreteNodeTypes.AttributeSingleQuoted:
      case ConcreteNodeTypes.AttributeUnquoted: {
        // Concrete and AST attribute enums declare the same string values, hence the cast.
        const attributeNode: AttributeDoubleQuoted | AttributeSingleQuoted | AttributeUnquoted = {
          type: node.type as unknown as
            | NodeTypes.AttributeDoubleQuoted
            | NodeTypes.AttributeSingleQuoted
            | NodeTypes.AttributeUnquoted,
          locStart: node.locStart,
          locEnd: node.locEnd,
          source: node.source,
          name: cstToAST([node.name])[0] as TextNode,
          value: toAttributeValue(node.value),
        };
        astBuilder.push(attributeNode);
        break;
      }
      case ConcreteNodeTypes.AttributeEmpty: {
        const attributeNode: AttributeEmpty = {
          type: NodeTypes.AttributeEmpty,
          locStart: node.locStart,
          locEnd: node.locEnd,
          source: node.source,
          name: cstToAST([node.name])[0] as TextNode,
        };
        astBuilder.push(attributeNode);
        break;
      }
      default: {
        // NOTE(review): the message argument is empty — consider a descriptive message.
        throw new UnknownConcreteNodeTypeError(
          '',
          (node as any)?.source,
          (node as any)?.locStart,
          (node as any)?.locEnd,
        );
      }
    }
  }
  return astBuilder.finish();
}
export default function sourceToAST(source: string): LiquidXNode[] {
const cst = sourceToCST(source);
const ast = cstToAST(cst);
return ast;
}
| src/parser/2-cst-to-ast/index.ts | unshopable-liquidx-a101873 | [
{
"filename": "src/parser/2-cst-to-ast/__tests__/element-node.test.ts",
"retrieved_chunk": " expectOutput(input).toHaveProperty('0.children.0.children.0.type', NodeTypes.TextNode);\n});\nit('should throw an error if corresponding closing tag is missing', () => {\n const input = '<Box>';\n expectErrorMessage(input).toMatchSnapshot();\n});\nit('should throw an error if corresponding opening tag is missing', () => {\n const input = '</Box>';\n expectErrorMessage(input).toMatchSnapshot();\n});",
"score": 33.151477161285904
},
{
"filename": "src/renderer/index.ts",
"retrieved_chunk": " attributes.push(childrenAttribute);\n }\n const renderedAttributes = node.attributes.map((attribute) => renderAST([attribute]));\n const separator = ', ';\n const attributesString =\n renderedAttributes.length > 0 ? `${separator}${renderedAttributes.join(separator)}` : '';\n output += `{% render '${node.name}'${attributesString} %}`;\n if (withSource && !isChildOfElementNode) {\n output += renderEndMarker(node);\n }",
"score": 30.111412858190448
},
{
"filename": "src/renderer/index.ts",
"retrieved_chunk": " let output = '';\n const attributes = node.attributes;\n if (withSource && !isChildOfElementNode) {\n output += renderStartMarker();\n }\n if (node.children.length > 0) {\n const captureName = `${node.name}Children`;\n output += `{% capture ${captureName} %}`;\n output += renderAST(node.children, { withSource, isChildOfElementNode: true });\n output += '{% endcapture %}';",
"score": 28.29669078840398
},
{
"filename": "src/parser/2-cst-to-ast/ast-builder.ts",
"retrieved_chunk": " }\n close(\n node: ConcreteElementClosingTagNode | ConcreteElementSelfClosingTagNode,\n nodeType: NodeTypes.ElementNode,\n ) {\n if (!this.parent || this.parent.name !== node.name || this.parent.type !== nodeType) {\n throw new ASTParsingError(\n `LiquidX element '${node.name}' has no corresponding opening tag`,\n this.source,\n node.locStart,",
"score": 23.86637422844987
},
{
"filename": "src/parser/2-cst-to-ast/ast-builder.ts",
"retrieved_chunk": " return (this.current || []).length - 1;\n }\n get parent(): ElementNode | undefined {\n if (this.cursor.length == 0) return undefined;\n return deepGet<ElementNode>(dropLast(1, this.cursor), this.ast);\n }\n open(node: ElementNode) {\n this.push(node);\n this.cursor.push(this.currentPosition);\n this.cursor.push('children');",
"score": 23.80613056744093
}
] | typescript | push(
toTextNode({ |
import { camelCase } from 'lodash';
import { type Request, type IProcedureResult, type IResult, type IRecordSet } from 'mssql';
import { type GraphQLResolveInfo } from 'graphql';
import {
type DriverType,
type PreparedStoredProcedureParameter,
ParameterMode,
type StoredProcedureSchema,
type StoredProcedureParameter,
type ILogger,
type InputParameters,
} from '../types';
import { mapDbTypeToDriverType, replacer } from '../utils';
import { logExecutionBegin, logPerformance, logSafely } from '../logging';
import {
type StoredProcedureCacheManager,
type StoredProcedureMetadataManager,
} from '../stored-procedure';
import { type IResolverProcedureResult } from '../types/i-resolver-procedure-result';
import { getNodeSelectionSetNames, getFieldNamesExcludingNode } from '../utils/graphql-helper';
/**
* StoredProcedureManager provides methods to interact
* with a Microsoft SQL Server database for managing stored procedures.
*/
export class StoredProcedureManager {
/**
* Creates a new instance of StoredProcedureManager.
*/
constructor(
private readonly _storedProcedureCacheManager: StoredProcedureCacheManager,
private readonly _storedProcedureMetadataManager: StoredProcedureMetadataManager,
) {}
/**
* Executes a stored procedure with the provided input parameters, and returns the result.
* @template TVal - The type of records in the result set.
* @template TRet - The type of the result object to be returned.
* @param {string} storedProcedureName - The name of the stored procedure to execute.
* @param {StoredProcedureInput} input - The input parameters for the stored procedure.
* @param {Request} request - The request to execute the stored procedure.
* @param {GraphQLResolveInfo | undefined} info - If provided, will be used to case-insensitively map the stored
* procedure results to the correct schema field names.
* @param {ILogger} logger - The logger to use for logging.
* @returns A Promise that resolves to the result of the stored procedure execution.
*/
public async executeStoredProcedure<T>(
storedProcedureName: string,
input: InputParameters,
request: Request,
logger: ILogger,
info?: GraphQLResolveInfo,
): Promise<IResolverProcedureResult<T>> {
let startTime = performance.now();
let schema = (await this._storedProcedureCacheManager.tryGetFromCache(storedProcedureName)) as
| IResult<StoredProcedureSchema>
| undefined;
if (schema === undefined) {
logSafely(
logger,
'info',
// Yellow
`\x1b[33mCache miss occurred while retrieving the cached schema for ${storedProcedureName}\x1b[0m`,
);
schema = await this._storedProcedureMetadataManager.getStoredProcedureParameterSchema(
storedProcedureName,
logger,
);
await this._storedProcedureCacheManager.addToCache(storedProcedureName, schema);
} else {
logSafely(
logger,
'info',
// Green
`\x1b[32mCache hit occurred while retrieving the cached schema for ${storedProcedureName}\x1b[0m`,
);
}
logPerformance(logger, 'getStoredProcedureParameterSchema', startTime);
startTime = performance.now();
const storedProcedureParameters =
this._storedProcedureMetadataManager.parseStoredProcedureParameters(
storedProcedureName,
schema,
);
logPerformance(logger, 'parseStoredProcedureParameters', startTime);
startTime = performance.now();
const preparedRequest = this.prepareStoredProcedureRequest(
storedProcedureParameters,
input,
request,
);
logPerformance(logger, 'prepareStoredProcedureRequest', startTime);
startTime = performance.now();
logExecutionBegin(
logger,
`Stored Procedure ${storedProcedureName} with parameters`,
preparedRequest.parameters,
// Green
'32m',
);
const result = await preparedRequest.execute(storedProcedureName);
startTime = performance.now();
const preparedResult = this.prepareStoredProcedureResult(result, info);
logPerformance(logger, 'prepareStoredProcedureResult', startTime);
return preparedResult;
}
private prepareParameters(
storedProcedureParameters: IterableIterator<StoredProcedureParameter>,
input: InputParameters,
): Map<string, PreparedStoredProcedureParameter> {
// We want to use the inferred DB Stored Procedure schema as the source of truth.
const preparedParameters = new Map<string, PreparedStoredProcedureParameter>();
for (const spParameter of storedProcedureParameters) {
const { name, type, length, precision, scale, ...rest } = spParameter;
const parameterName = name.slice(1);
// Let's use the parameter name in lowercase as the lookup key.
preparedParameters.set(parameterName.toLowerCase(), {
name: parameterName,
type: mapDbTypeToDriverType({
type,
length,
precision,
scale,
}) as DriverType,
value: undefined,
...rest,
});
}
// Populate our input values into the request parameters.
const inputParameters = input as Record<string, unknown>;
for (const inputParameterKey in inputParameters) {
const preparedParameter = preparedParameters.get(inputParameterKey.toLowerCase());
if (preparedParameter != null) {
preparedParameter.value = inputParameters[inputParameterKey];
}
// We don't care about provided input parameters that are missing in the Stored Procedure definition.
}
return preparedParameters;
}
private getMissingRequiredParameters(
parameters: Map<string, PreparedStoredProcedureParameter>,
): PreparedStoredProcedureParameter[] {
// Check what required parameters are missing.
const missingRequiredParameters = [];
for (const parameter of parameters.values()) {
// If they have a default value they can be ommitted from the request.
if (parameter.defaultValue === undefined && parameter.value === undefined) {
missingRequiredParameters.push(parameter);
}
}
return missingRequiredParameters;
}
private addParametersToRequest(
parameters: Map<string, PreparedStoredProcedureParameter>,
request: Request,
): Request {
const preparedRequest = request;
for (const parameter of parameters.values()) {
const { name, type, mode, value, defaultValue } = parameter;
if (defaultValue !== undefined && value === undefined) {
continue;
}
const modeEnum = mode;
if (modeEnum === ParameterMode.IN) {
preparedRequest.input(name, type, value);
} else if (modeEnum === ParameterMode.INOUT) {
preparedRequest.output(name, type, value);
} else {
throw new Error(`Unknown parameter mode: ${mode}`);
}
}
return preparedRequest;
}
/**
* Prepares the stored procedure request.
* @param {IterableIterator<StoredProcedureParameter>} storedProcedureParameters - The stored procedure parameters.
* @param {StoredProcedureInput} input - The input object.
* @param {Request} request - The request object.
* @returns A prepared request object.
*/
private prepareStoredProcedureRequest(
storedProcedureParameters: IterableIterator<StoredProcedureParameter>,
input: InputParameters,
request: Request,
): Request {
const parameters = this.prepareParameters(storedProcedureParameters, input);
const missingRequiredParameters = this.getMissingRequiredParameters(parameters);
const missingLength = missingRequiredParameters.length;
if (missingLength > 0) {
throw new Error(
`Missing ${missingLength} required parameters: ${missingRequiredParameters
.map((param) => JSON.stringify(param, replacer, 0))
.join(', ')}.`,
);
}
const preparedRequest = this.addParametersToRequest(parameters, request);
return preparedRequest;
}
/**
* Maps the keys of an object based on the provided mapping.
* @template T - The type of the original object.
* @param {T} obj - The object whose keys need to be mapped.
* @param {Record<string, string>} mapping - A dictionary containing the mapping of the original keys to the new keys.
* @returns {T} A new object with the keys mapped according to the provided mapping.
*/
private mapKeysWithMapping<T extends Record<string, unknown>>(
obj: T,
mapping: Record<string, string>,
): T {
const result: Record<string, unknown> = {};
for (const key in obj) {
const mappedKey = mapping[key.toLowerCase()] ?? camelCase(key);
result[mappedKey] = obj[key];
}
return result as T;
}
/**
* Prepares the stored procedure result into a GraphQL result object.
* @param {IProcedureResult} result - The stored procedure result.
* @param {GraphQLResolveInfo | undefined} info - If provided, will be used to case-insensitively map the stored
* procedure results to the correct schema field names.
* @returns {IResolverProcedureResult} A prepared GraphQL result object.
*/
private prepareStoredProcedureResult<T extends Record<string, unknown>>(
result: IProcedureResult<T>,
info?: GraphQLResolveInfo,
): IResolverProcedureResult<T> {
const { resultSetFields, outputFields } =
info !== undefined
? {
resultSetFields: | getNodeSelectionSetNames(info, 'resultSets'),
outputFields: getFieldNamesExcludingNode(info, 'resultSets'),
} |
: { resultSetFields: {}, outputFields: {} };
const resultSets = result.recordsets.map((recordset: IRecordSet<Record<string, unknown>>) => {
return recordset.map((record: Record<string, unknown>) =>
this.mapKeysWithMapping(record, resultSetFields),
);
});
const output = this.mapKeysWithMapping(result.output, outputFields);
const preparedResult = {
returnValue: result.returnValue,
resultSets: resultSets as T[][],
rowsAffected: result.rowsAffected,
...output,
};
return preparedResult;
}
}
| src/lib/stored-procedure/stored-procedure-manager.ts | Falven-mssql-data-source-bca6621 | [
{
"filename": "src/lib/datasource/mssql-datasource.ts",
"retrieved_chunk": " info?: GraphQLResolveInfo,\n ): Promise<IResolverProcedureResult<T>> {\n const startTime = performance.now();\n const logger = this._queryLogger;\n logExecutionBegin(logger, `Stored Procedure Query ${storedProcedureName} with inputs`, input);\n const result = await this._databaseExecutor.executeQueryRequest(\n async (request: Request): Promise<IResolverProcedureResult<T>> =>\n await this._storedProcedureManager.executeStoredProcedure<T>(\n storedProcedureName,\n input,",
"score": 20.293573610634176
},
{
"filename": "src/lib/datasource/mssql-datasource.ts",
"retrieved_chunk": " storedProcedureName: string,\n input: InputParameters,\n info?: GraphQLResolveInfo,\n ): Promise<IResolverProcedureResult<T>> {\n const startTime = performance.now();\n const logger = this._mutationLogger;\n logExecutionBegin(logger, `Stored Procedure Mutation ${storedProcedureName}`, input);\n const result = await this._databaseExecutor.executeMutationRequest(\n async (request: Request): Promise<IResolverProcedureResult<T>> =>\n await this._storedProcedureManager.executeStoredProcedure(",
"score": 19.731997046007265
},
{
"filename": "src/lib/types/i-resolver-procedure-result.ts",
"retrieved_chunk": "/**\n * Represents a GraphQL resolver stored procedure result.\n * The format of the result is: a single resultSet property, followed by\n * any additional output properties that were returned by the stored procedure.\n */\nexport interface IResolverProcedureResult<T> extends Record<string, unknown> {\n returnValue?: number;\n resultSets?: T[][];\n rowsAffected?: number[];\n}",
"score": 16.634221636627817
},
{
"filename": "src/lib/utils/graphql-helper.ts",
"retrieved_chunk": " info: GraphQLResolveInfo,\n nodeName: string,\n): Record<string, string> {\n const siblingFields: Record<string, string> = {};\n info.fieldNodes.forEach((fieldNode) => {\n visit(fieldNode, {\n Field(node) {\n const isTargetNode = node.name.value.toLowerCase() === nodeName.toLowerCase();\n if (isTargetNode) {\n return false;",
"score": 13.130318558483527
},
{
"filename": "src/lib/utils/graphql-helper.ts",
"retrieved_chunk": " info: GraphQLResolveInfo,\n nodeName: string,\n): Record<string, string> {\n const targetNode = findNodeByName(info, nodeName);\n // If the target node is not found, return an empty dictionary\n if (targetNode === undefined) {\n return {};\n }\n // If the target node is found, return its subfield names\n return getSelectionSetNames(targetNode);",
"score": 13.050338616536102
}
] | typescript | getNodeSelectionSetNames(info, 'resultSets'),
outputFields: getFieldNamesExcludingNode(info, 'resultSets'),
} |
import type { Request } from 'mssql';
import { type GraphQLResolveInfo } from 'graphql';
import { DevConsoleLogger, logExecutionBegin, logExecutionEnd, logSafely } from '../logging';
import { DatabaseExecutor } from '../executor';
import { ConnectionManager } from '../utils';
import {
StoredProcedureManager,
StoredProcedureCacheManager,
StoredProcedureMetadataManager,
} from '../stored-procedure';
import type { MSSQLOptions, ILogger, IResolverProcedureResult, InputParameters } from '../types';
/**
* A GraphQL DataSource backed by a Microsoft SQL Server database.
* Maintains separate caching for Query and Mutation operations.
* Maintains a global connection pool cache to reuse connections.
*/
export class MSSQLDataSource {
private readonly _queryOptions: MSSQLOptions;
private readonly _mutationOptions: MSSQLOptions;
private readonly _queryLogger: ILogger;
private readonly _mutationLogger: ILogger;
private readonly _connectionManager: ConnectionManager;
private readonly _databaseExecutor: DatabaseExecutor;
private readonly _storedProcedureMetadataManager: StoredProcedureMetadataManager;
private readonly _storedProcedureCacheManager: StoredProcedureCacheManager;
private readonly _storedProcedureManager: StoredProcedureManager;
/**
* Creates a new MSSQLDataSource with the given options.
* @param queryOptions The options for Query operations
* @param mutationOptions The options for Mutation operations
*/
constructor(
queryOptions: MSSQLOptions = MSSQLDataSource.defaultOptions,
mutationOptions: MSSQLOptions = MSSQLDataSource.defaultOptions,
) {
this._queryOptions = queryOptions;
this._mutationOptions = mutationOptions;
const defaultOptions = MSSQLDataSource.defaultOptions;
this._queryLogger =
queryOptions.logger !== undefined ? queryOptions.logger : (defaultOptions.logger as ILogger);
this._mutationLogger =
mutationOptions.logger !== undefined
? mutationOptions.logger
: (defaultOptions.logger as ILogger);
this._connectionManager = new ConnectionManager(
this._queryOptions.config,
this._mutationOptions.config,
);
this._databaseExecutor = new DatabaseExecutor(this._connectionManager);
this._storedProcedureMetadataManager = new StoredProcedureMetadataManager(
this._databaseExecutor,
);
this._storedProcedureCacheManager = new StoredProcedureCacheManager();
this._storedProcedureManager = new StoredProcedureManager(
this._storedProcedureCacheManager,
this._storedProcedureMetadataManager,
);
}
/**
* Executes a stored procedure for a Query operation with the provided input parameters, and returns the result.
* @template T - This type parameter represents the type of the value returned by the resolver procedure.
* @param {string} storedProcedureName - The name of the stored procedure to execute.
* @param {StoredProcedureInput} input - The input parameters for the stored procedure.
* @param {GraphQLResolveInfo | undefined} info - If provided, will be used to case-insensitively map the stored
* procedure results to the correct schema field names.
* @returns A Promise that resolves to the result of the stored procedure execution.
*/
public async executeStoredProcedureQuery<T>(
storedProcedureName: string,
| input: InputParameters,
info?: GraphQLResolveInfo,
): Promise<IResolverProcedureResult<T>> { |
const startTime = performance.now();
const logger = this._queryLogger;
logExecutionBegin(logger, `Stored Procedure Query ${storedProcedureName} with inputs`, input);
const result = await this._databaseExecutor.executeQueryRequest(
async (request: Request): Promise<IResolverProcedureResult<T>> =>
await this._storedProcedureManager.executeStoredProcedure<T>(
storedProcedureName,
input,
request,
logger,
info,
),
logger,
);
logExecutionEnd(logger, `Stored Procedure Query ${storedProcedureName}`, startTime);
logSafely(logger, 'info', `------------------`);
return result;
}
/**
* Executes a stored procedure for a Mutation operation with the provided input parameters, and returns the result.
* @template T - This type parameter represents the type of the value returned by the resolver procedure.
* @param {string} storedProcedureName - The name of the stored procedure to execute.
* @param {StoredProcedureInput} input - The input parameters for the stored procedure.
* @param {GraphQLResolveInfo | undefined} info - If provided, will be used to case-insensitively map the stored
* procedure results to the correct schema field names.
* @returns A Promise that resolves to the result of the stored procedure execution.
*/
public async executeStoredProcedureMutation<T>(
storedProcedureName: string,
input: InputParameters,
info?: GraphQLResolveInfo,
): Promise<IResolverProcedureResult<T>> {
const startTime = performance.now();
const logger = this._mutationLogger;
logExecutionBegin(logger, `Stored Procedure Mutation ${storedProcedureName}`, input);
const result = await this._databaseExecutor.executeMutationRequest(
async (request: Request): Promise<IResolverProcedureResult<T>> =>
await this._storedProcedureManager.executeStoredProcedure(
storedProcedureName,
input,
request,
logger,
info,
),
logger,
);
logExecutionEnd(logger, `Stored Procedure Mutation ${storedProcedureName}`, startTime);
return result;
}
/**
* Default options for the Query and Mutation global connection pool cache.
*/
private static get defaultOptions(): MSSQLOptions {
return {
config: {
user: '',
password: '',
server: '',
database: '',
},
logger: new DevConsoleLogger(),
};
}
}
| src/lib/datasource/mssql-datasource.ts | Falven-mssql-data-source-bca6621 | [
{
"filename": "src/lib/stored-procedure/stored-procedure-manager.ts",
"retrieved_chunk": " * procedure results to the correct schema field names.\n * @param {ILogger} logger - The logger to use for logging.\n * @returns A Promise that resolves to the result of the stored procedure execution.\n */\n public async executeStoredProcedure<T>(\n storedProcedureName: string,\n input: InputParameters,\n request: Request,\n logger: ILogger,\n info?: GraphQLResolveInfo,",
"score": 73.58933829188
},
{
"filename": "src/lib/stored-procedure/stored-procedure-manager.ts",
"retrieved_chunk": " result[mappedKey] = obj[key];\n }\n return result as T;\n }\n /**\n * Prepares the stored procedure result into a GraphQL result object.\n * @param {IProcedureResult} result - The stored procedure result.\n * @param {GraphQLResolveInfo | undefined} info - If provided, will be used to case-insensitively map the stored\n * procedure results to the correct schema field names.\n * @returns {IResolverProcedureResult} A prepared GraphQL result object.",
"score": 67.75858904512344
},
{
"filename": "src/lib/stored-procedure/stored-procedure-manager.ts",
"retrieved_chunk": " private readonly _storedProcedureMetadataManager: StoredProcedureMetadataManager,\n ) {}\n /**\n * Executes a stored procedure with the provided input parameters, and returns the result.\n * @template TVal - The type of records in the result set.\n * @template TRet - The type of the result object to be returned.\n * @param {string} storedProcedureName - The name of the stored procedure to execute.\n * @param {StoredProcedureInput} input - The input parameters for the stored procedure.\n * @param {Request} request - The request to execute the stored procedure.\n * @param {GraphQLResolveInfo | undefined} info - If provided, will be used to case-insensitively map the stored",
"score": 60.89840695081632
},
{
"filename": "src/lib/stored-procedure/stored-procedure-metadata-manager.ts",
"retrieved_chunk": " private static readonly parameterDefinitionRegex = /(@[\\w]+)\\s+([^\\s]+)\\s*=\\s*([^, ]*),?/gi;\n constructor(private readonly _databaseExecutor: DatabaseExecutor) {}\n /**\n * Parses the stored procedure parameter schema into a StoredProcedureParameter array.\n * @param {string} storedProcedureName - The name of the stored procedure to retrieve the parameter schema for.\n * @returns A Promise that resolves to the result of the stored procedure execution.\n */\n public async getStoredProcedureParameterSchema(\n storedProcedureName: string,\n logger: ILogger,",
"score": 42.18629584278235
},
{
"filename": "src/lib/stored-procedure/stored-procedure-manager.ts",
"retrieved_chunk": " }\n /**\n * Prepares the stored procedure request.\n * @param {IterableIterator<StoredProcedureParameter>} storedProcedureParameters - The stored procedure parameters.\n * @param {StoredProcedureInput} input - The input object.\n * @param {Request} request - The request object.\n * @returns A prepared request object.\n */\n private prepareStoredProcedureRequest(\n storedProcedureParameters: IterableIterator<StoredProcedureParameter>,",
"score": 36.29178538035577
}
] | typescript | input: InputParameters,
info?: GraphQLResolveInfo,
): Promise<IResolverProcedureResult<T>> { |
import { type IResult, type Request } from 'mssql';
import type { StoredProcedureParameter, StoredProcedureSchema, ILogger } from '../types';
import { type DatabaseExecutor } from '../executor';
import { convertSqlValueToJsValue } from '../utils';
/**
* A manager for stored procedure metadata.
* Handles the retrieval and caching of stored procedure metadata.
*/
export class StoredProcedureMetadataManager {
/**
* Regular expression to extract MSSQL stored procedure names.
* See https://regex101.com/r/cMsTyT/1 for this regex.
*/
private static readonly storedProcedureNameRegex =
/((?:(?:\[([\w\s]+)\]|(\w+))\.)?(?:\[([\w\s]+)\]|(\w+))\.(?:\[([\w\s]+)\]|(\w+)))/i;
/**
* Matches any comments from the Stored Procedure definition.
* See https://regex101.com/r/dxA7n0/1 for this regex.
*/
private static readonly commentRegex = /(?:\s*-{2}.+\s*$)|(?:\/\*([\s\S]*?)\*\/)/gm;
/**
* Matches the parameters from the Stored Procedure definition.
* See https://regex101.com/r/4TaTky/1 for this regex.
*/
private static readonly parameterSectionRegex =
/(?<=(?:CREATE|ALTER)\s+PROCEDURE)\s+((?:(?:\[([\w\s]+)\]|(\w+))\.)?(?:\[([\w\s]+)\]|(\w+))\.(?:\[([\w\s]+)\]|(\w+)))(.*?)(?=(?:AS|FOR\s+REPLICATION)[^\w])/is;
/**
* See https://regex101.com/r/iMEaLb/1 for this regex.
* Match the individual parameters in the Parameter Definition.
*/
private static readonly parameterDefinitionRegex = /(@[\w]+)\s+([^\s]+)\s*=\s*([^, ]*),?/gi;
constructor(private readonly _databaseExecutor: DatabaseExecutor) {}
/**
* Parses the stored procedure parameter schema into a StoredProcedureParameter array.
* @param {string} storedProcedureName - The name of the stored procedure to retrieve the parameter schema for.
* @returns A Promise that resolves to the result of the stored procedure execution.
*/
public async getStoredProcedureParameterSchema(
storedProcedureName: string,
logger: ILogger,
): Promise<IResult<StoredProcedureSchema>> {
return await this._databaseExecutor.executeQueryRequest(async (request: Request) => {
// Remove square bracket notation if any, and split into schema and name.
const schemaAndName = storedProcedureName.replace(/\[|\]/g, '').split('.');
const result = await request.query<StoredProcedureSchema>(
'SELECT ' +
'PARAMETER_NAME as name, ' +
'DATA_TYPE as type, ' +
'PARAMETER_MODE as mode, ' +
'CHARACTER_MAXIMUM_LENGTH length, ' +
'NUMERIC_PRECISION as precision, ' +
'NUMERIC_SCALE as scale ' +
'FROM INFORMATION_SCHEMA.PARAMETERS ' +
`WHERE SPECIFIC_SCHEMA = '${schemaAndName[0]}' AND SPECIFIC_NAME = '${schemaAndName[1]}';
SELECT OBJECT_DEFINITION(OBJECT_ID('${storedProcedureName}')) AS storedProcedureDefinition;`,
);
const recordSetLength = result.recordsets.length as number;
if (recordSetLength < 1 || recordSetLength > 2) {
throw new Error(
`Could not retrieve stored procedure parameter schema from Database for stored procedure ${storedProcedureName}.`,
);
}
if (recordSetLength !== 2 || result.recordsets[1].length !== 1) {
throw new Error(
`Could not retrieve stored procedure definition from Database for stored procedure ${storedProcedureName}.`,
);
}
return result;
}, logger);
}
/**
* Parses the stored procedure parameter schema into a StoredProcedureParameter array.
* @param {string} storedProcedureName - The name of the stored procedure to parse the parameter schema for.
* @param {IResult<StoredProcedureSchema>} schemaResult - The result of the stored procedure parameter schema query.
* @returns A StoredProcedureParameter array.
*/
public parseStoredProcedureParameters(
storedProcedureName: string,
schemaResult: IResult<StoredProcedureSchema>,
): IterableIterator<StoredProcedureParameter> {
const parameterSchemaMap: Map<string, StoredProcedureParameter> =
schemaResult.recordsets[0].reduce(
(parameterMap: Map<string, StoredProcedureParameter>, item: StoredProcedureParameter) => {
parameterMap.set(item.name, item);
return parameterMap;
},
new Map<string, StoredProcedureParameter>(),
);
const storedProcedureDefinition = schemaResult.recordsets[1][0].storedProcedureDefinition;
if (storedProcedureDefinition == null) {
throw new Error(
`Could not parse stored procedure definition for stored procedure ${storedProcedureName}.`,
);
}
const commentStrippedStoredProcedureDefinition = storedProcedureDefinition.replace(
StoredProcedureMetadataManager.commentRegex,
'',
);
if (commentStrippedStoredProcedureDefinition === '') {
throw new Error(
`Could not parse stored procedure comments from definition for stored procedure ${storedProcedureName}.`,
);
}
const parameterSection = commentStrippedStoredProcedureDefinition.match(
StoredProcedureMetadataManager.parameterSectionRegex,
);
if (parameterSection === null || parameterSection.length !== 9) {
throw new Error(
`Could not parse stored procedure parameters from definition for stored procedure ${storedProcedureName}.`,
);
}
const parameterDefinition = parameterSection[8];
let parameterDefinitionMatch;
while (
(parameterDefinitionMatch =
StoredProcedureMetadataManager.parameterDefinitionRegex.exec(parameterDefinition)) !== null
) {
const name = parameterDefinitionMatch[1];
const type = parameterDefinitionMatch[2];
const defaultValue = parameterDefinitionMatch[3];
const parameter = parameterSchemaMap.get(name);
if (parameter !== undefined) {
parameter | .defaultValue = convertSqlValueToJsValue(defaultValue, type); |
}
}
return parameterSchemaMap.values();
}
}
| src/lib/stored-procedure/stored-procedure-metadata-manager.ts | Falven-mssql-data-source-bca6621 | [
{
"filename": "src/lib/stored-procedure/stored-procedure-manager.ts",
"retrieved_chunk": " private addParametersToRequest(\n parameters: Map<string, PreparedStoredProcedureParameter>,\n request: Request,\n ): Request {\n const preparedRequest = request;\n for (const parameter of parameters.values()) {\n const { name, type, mode, value, defaultValue } = parameter;\n if (defaultValue !== undefined && value === undefined) {\n continue;\n }",
"score": 39.96148392318941
},
{
"filename": "src/lib/stored-procedure/stored-procedure-manager.ts",
"retrieved_chunk": " // Check what required parameters are missing.\n const missingRequiredParameters = [];\n for (const parameter of parameters.values()) {\n // If they have a default value they can be ommitted from the request.\n if (parameter.defaultValue === undefined && parameter.value === undefined) {\n missingRequiredParameters.push(parameter);\n }\n }\n return missingRequiredParameters;\n }",
"score": 29.728186542258474
},
{
"filename": "src/lib/stored-procedure/stored-procedure-manager.ts",
"retrieved_chunk": " for (const spParameter of storedProcedureParameters) {\n const { name, type, length, precision, scale, ...rest } = spParameter;\n const parameterName = name.slice(1);\n // Let's use the parameter name in lowercase as the lookup key.\n preparedParameters.set(parameterName.toLowerCase(), {\n name: parameterName,\n type: mapDbTypeToDriverType({\n type,\n length,\n precision,",
"score": 24.866840397809803
},
{
"filename": "src/lib/types/i-stored-procedure-parameter.ts",
"retrieved_chunk": "import type { ParameterMode } from '.';\n/**\n * Represents a subset of used metadata for an MSSQL stored procedure parameter.\n * @property {string} name - The name of the parameter. Begins with @.\n * @property {string} type - The MSSQL data type of the parameter.\n * @property {ParameterMode} mode - The MSSQL mode of the parameter. Either 'IN', 'INOUT' or 'UNKNOWN'.\n * @property {unknown} defaultValue - The default value of the parameter, if any, or undefined.\n * @property {number} length - The length of character-based parameters, or undefined.\n * @property {number} precision - The precision of floating point parameters, or undefined.\n * @property {number} scale - The scale of floating point parameters, or undefined.",
"score": 23.55862077589879
},
{
"filename": "src/lib/stored-procedure/stored-procedure-manager.ts",
"retrieved_chunk": " const modeEnum = mode;\n if (modeEnum === ParameterMode.IN) {\n preparedRequest.input(name, type, value);\n } else if (modeEnum === ParameterMode.INOUT) {\n preparedRequest.output(name, type, value);\n } else {\n throw new Error(`Unknown parameter mode: ${mode}`);\n }\n }\n return preparedRequest;",
"score": 20.519241361763584
}
] | typescript | .defaultValue = convertSqlValueToJsValue(defaultValue, type); |
/* eslint-disable no-await-in-loop */
import dotenv from 'dotenv';
import { OpenAIChat } from 'langchain/llms/openai';
// eslint-disable-next-line import/no-unresolved
import * as readline from 'node:readline/promises';
import path from 'path';
import fs from 'fs';
/* This line of code is importing the `stdin` and `stdout` streams from the `process` module in
Node.js. These streams are used for reading input from the user and writing output to the console,
respectively. */
import { stdin as input, stdout as output } from 'node:process';
import { CallbackManager } from 'langchain/callbacks';
import { ChatPromptTemplate, HumanMessagePromptTemplate, SystemMessagePromptTemplate } from 'langchain/prompts';
import { LLMChain } from 'langchain/chains';
import { oneLine } from 'common-tags';
import chalk from 'chalk';
import logChat from './chatLogger.js';
import createCommandHandler from './commands.js';
import { getMemoryVectorStore, addDocumentsToMemoryVectorStore, getBufferWindowMemory } from './lib/memoryManager.js';
import { getContextVectorStore } from './lib/contextManager.js';
import { getRelevantContext } from './lib/vectorStoreUtils.js';
import sanitizeInput from './utils/sanitizeInput.js';
import { getConfig, getProjectRoot } from './config/index.js';
const projectRootDir = getProjectRoot();
dotenv.config();
// Set up the chat log directory
const chatLogDirectory = path.join(projectRootDir, 'chat_logs');
// Get the prompt template
const systemPromptTemplate = fs.readFileSync(path.join(projectRootDir, 'src/prompt.txt'), 'utf8');
// Set up the readline interface to read input from the user and write output to the console
const rl = readline.createInterface({ input, output });
// Set up CLI commands
const commandHandler: CommandHandler = createCommandHandler();
const callbackManager = CallbackManager.fromHandlers({
// This function is called when the LLM generates a new token (i.e., a prediction for the next word)
async handleLLMNewToken(token: string) {
// Write the token to the output stream (i.e., the console)
output.write(token);
},
});
const llm = new OpenAIChat({
streaming: true,
callbackManager,
modelName: process.env.MODEL || 'gpt-3.5-turbo',
});
const systemPrompt = SystemMessagePromptTemplate.fromTemplate(oneLine`
${systemPromptTemplate}
`);
const chatPrompt = ChatPromptTemplate.fromPromptMessages([
systemPrompt,
HumanMessagePromptTemplate.fromTemplate('QUESTION: """{input}"""'),
]);
const windowMemory = getBufferWindowMemory();
const chain = new LLMChain({
prompt: chatPrompt,
memory: windowMemory,
llm,
});
// eslint-disable-next-line no-constant-condition
while (true) {
output.write(chalk.green('\nStart chatting or type /help for a list of commands\n'));
const userInput = await rl.question('> ');
let response;
if (userInput.startsWith('/')) {
const [command, ...args] = userInput.slice(1).split(' ');
await commandHandler.execute(command, args, output);
} else {
const memoryVectorStore = await getMemoryVectorStore();
const contextVectorStore = await getContextVectorStore();
| const question = sanitizeInput(userInput); |
const config = getConfig();
const context = await getRelevantContext(contextVectorStore, question, config.numContextDocumentsToRetrieve);
const history = await getRelevantContext(memoryVectorStore, question, config.numMemoryDocumentsToRetrieve);
try {
response = await chain.call({
input: question,
context,
history,
immediate_history: config.useWindowMemory ? windowMemory : '',
});
if (response) {
await addDocumentsToMemoryVectorStore([
{ content: question, metadataType: 'question' },
{ content: response.text, metadataType: 'answer' },
]);
await logChat(chatLogDirectory, question, response.response);
}
} catch (error) {
if (error instanceof Error && error.message.includes('Cancel:')) {
// TODO: Handle cancel
} else if (error instanceof Error) {
output.write(chalk.red(error.message));
} else {
output.write(chalk.red(error));
}
}
}
output.write('\n');
}
| src/index.ts | gmickel-memorybot-bad0302 | [
{
"filename": "src/commands.ts",
"retrieved_chunk": " getCommands,\n async execute(commandName: string, args: string[], output: NodeJS.WriteStream) {\n const command = commands.find((cmd) => cmd.name === commandName || cmd.aliases.includes(commandName));\n if (command) {\n await command.execute(args, output, commandHandler);\n } else {\n output.write(chalk.red('Unknown command. Type /help to see the list of available commands.\\n'));\n }\n },\n };",
"score": 43.443447219217845
},
{
"filename": "src/commands/helpCommand.ts",
"retrieved_chunk": "import chalk from 'chalk';\nimport createCommand from './command.js';\nconst helpCommand = createCommand(\n 'help',\n ['h', '?'],\n 'Show the list of available commands',\n (_args, output, commandHandler) =>\n new Promise<void>((resolve) => {\n output.write(chalk.blue('Usage:\\n'));\n output.write('Ask memorybot to write some marketing materials and press enter.\\n');",
"score": 26.586409900014726
},
{
"filename": "src/commands/addYouTubeCommand.ts",
"retrieved_chunk": " if (!args || args.length !== 1) {\n output.write(chalk.red('Invalid number of arguments. Usage: /add-url `youtube url` or `youtube videoid`\\n'));\n return;\n }\n const URLOrVideoID = args[0];\n await addYouTube(URLOrVideoID);\n }\n);\nexport default addYouTubeCommand;",
"score": 26.317081781710513
},
{
"filename": "src/commands/switchContextStoreCommand.ts",
"retrieved_chunk": " if (!args || args.length !== 1) {\n output.write(chalk.red('Invalid number of arguments. Usage: /change-context-store `subdirectory`\\n'));\n return;\n }\n const subDirectory = args[0];\n await loadOrCreateEmptyVectorStore(subDirectory);\n }\n);\nexport default changeContextStoreCommand;",
"score": 24.73843273583841
},
{
"filename": "src/commands/helpCommand.ts",
"retrieved_chunk": " output.write(chalk.blue('\\nAvailable commands:\\n'));\n commandHandler.getCommands().forEach((command) => {\n const aliases = command.aliases.length > 0 ? ` (/${command.aliases.join(', /')})` : '';\n output.write(chalk.yellow(`/${command.name}${aliases}`));\n output.write(` - ${command.description}`);\n output.write('\\n');\n });\n resolve();\n })\n);",
"score": 24.690317565624113
}
] | typescript | const question = sanitizeInput(userInput); |
/* eslint-disable no-await-in-loop */
import dotenv from 'dotenv';
import { OpenAIChat } from 'langchain/llms/openai';
// eslint-disable-next-line import/no-unresolved
import * as readline from 'node:readline/promises';
import path from 'path';
import fs from 'fs';
/* This line of code is importing the `stdin` and `stdout` streams from the `process` module in
Node.js. These streams are used for reading input from the user and writing output to the console,
respectively. */
import { stdin as input, stdout as output } from 'node:process';
import { CallbackManager } from 'langchain/callbacks';
import { ChatPromptTemplate, HumanMessagePromptTemplate, SystemMessagePromptTemplate } from 'langchain/prompts';
import { LLMChain } from 'langchain/chains';
import { oneLine } from 'common-tags';
import chalk from 'chalk';
import logChat from './chatLogger.js';
import createCommandHandler from './commands.js';
import { getMemoryVectorStore, addDocumentsToMemoryVectorStore, getBufferWindowMemory } from './lib/memoryManager.js';
import { getContextVectorStore } from './lib/contextManager.js';
import { getRelevantContext } from './lib/vectorStoreUtils.js';
import sanitizeInput from './utils/sanitizeInput.js';
import { getConfig, getProjectRoot } from './config/index.js';
const projectRootDir = getProjectRoot();
dotenv.config();
// Set up the chat log directory
const chatLogDirectory = path.join(projectRootDir, 'chat_logs');
// Get the prompt template
const systemPromptTemplate = fs.readFileSync(path.join(projectRootDir, 'src/prompt.txt'), 'utf8');
// Set up the readline interface to read input from the user and write output to the console
const rl = readline.createInterface({ input, output });
// Set up CLI commands
const commandHandler: CommandHandler = createCommandHandler();
const callbackManager = CallbackManager.fromHandlers({
// This function is called when the LLM generates a new token (i.e., a prediction for the next word)
async handleLLMNewToken(token: string) {
// Write the token to the output stream (i.e., the console)
output.write(token);
},
});
const llm = new OpenAIChat({
streaming: true,
callbackManager,
modelName: process.env.MODEL || 'gpt-3.5-turbo',
});
const systemPrompt = SystemMessagePromptTemplate.fromTemplate(oneLine`
${systemPromptTemplate}
`);
const chatPrompt = ChatPromptTemplate.fromPromptMessages([
systemPrompt,
HumanMessagePromptTemplate.fromTemplate('QUESTION: """{input}"""'),
]);
const windowMemory = getBufferWindowMemory();
const chain = new LLMChain({
prompt: chatPrompt,
memory: windowMemory,
llm,
});
// eslint-disable-next-line no-constant-condition
while (true) {
output.write(chalk.green('\nStart chatting or type /help for a list of commands\n'));
const userInput = await rl.question('> ');
let response;
if (userInput.startsWith('/')) {
const [command, ...args] = userInput.slice(1).split(' ');
await commandHandler.execute(command, args, output);
} else {
| const memoryVectorStore = await getMemoryVectorStore(); |
const contextVectorStore = await getContextVectorStore();
const question = sanitizeInput(userInput);
const config = getConfig();
const context = await getRelevantContext(contextVectorStore, question, config.numContextDocumentsToRetrieve);
const history = await getRelevantContext(memoryVectorStore, question, config.numMemoryDocumentsToRetrieve);
try {
response = await chain.call({
input: question,
context,
history,
immediate_history: config.useWindowMemory ? windowMemory : '',
});
if (response) {
await addDocumentsToMemoryVectorStore([
{ content: question, metadataType: 'question' },
{ content: response.text, metadataType: 'answer' },
]);
await logChat(chatLogDirectory, question, response.response);
}
} catch (error) {
if (error instanceof Error && error.message.includes('Cancel:')) {
// TODO: Handle cancel
} else if (error instanceof Error) {
output.write(chalk.red(error.message));
} else {
output.write(chalk.red(error));
}
}
}
output.write('\n');
}
| src/index.ts | gmickel-memorybot-bad0302 | [
{
"filename": "src/commands.ts",
"retrieved_chunk": " getCommands,\n async execute(commandName: string, args: string[], output: NodeJS.WriteStream) {\n const command = commands.find((cmd) => cmd.name === commandName || cmd.aliases.includes(commandName));\n if (command) {\n await command.execute(args, output, commandHandler);\n } else {\n output.write(chalk.red('Unknown command. Type /help to see the list of available commands.\\n'));\n }\n },\n };",
"score": 41.15015608952313
},
{
"filename": "src/lib/vectorStoreUtils.ts",
"retrieved_chunk": " const documents = await vectorStore.similaritySearch(sanitizedQuestion, numDocuments);\n return documents\n .map((doc) => doc.pageContent)\n .join(', ')\n .trim()\n .replaceAll('\\n', ' ');\n}\n// eslint-disable-next-line import/prefer-default-export\nexport { getRelevantContext };",
"score": 26.65255703229211
},
{
"filename": "src/commands/helpCommand.ts",
"retrieved_chunk": "import chalk from 'chalk';\nimport createCommand from './command.js';\nconst helpCommand = createCommand(\n 'help',\n ['h', '?'],\n 'Show the list of available commands',\n (_args, output, commandHandler) =>\n new Promise<void>((resolve) => {\n output.write(chalk.blue('Usage:\\n'));\n output.write('Ask memorybot to write some marketing materials and press enter.\\n');",
"score": 24.90862530422631
},
{
"filename": "src/commands/addYouTubeCommand.ts",
"retrieved_chunk": " if (!args || args.length !== 1) {\n output.write(chalk.red('Invalid number of arguments. Usage: /add-url `youtube url` or `youtube videoid`\\n'));\n return;\n }\n const URLOrVideoID = args[0];\n await addYouTube(URLOrVideoID);\n }\n);\nexport default addYouTubeCommand;",
"score": 23.62953625150303
},
{
"filename": "src/commands/helpCommand.ts",
"retrieved_chunk": " output.write(chalk.blue('\\nAvailable commands:\\n'));\n commandHandler.getCommands().forEach((command) => {\n const aliases = command.aliases.length > 0 ? ` (/${command.aliases.join(', /')})` : '';\n output.write(chalk.yellow(`/${command.name}${aliases}`));\n output.write(` - ${command.description}`);\n output.write('\\n');\n });\n resolve();\n })\n);",
"score": 22.791335053984284
}
] | typescript | const memoryVectorStore = await getMemoryVectorStore(); |
/* eslint-disable no-await-in-loop */
import dotenv from 'dotenv';
import { OpenAIChat } from 'langchain/llms/openai';
// eslint-disable-next-line import/no-unresolved
import * as readline from 'node:readline/promises';
import path from 'path';
import fs from 'fs';
/* This line of code is importing the `stdin` and `stdout` streams from the `process` module in
Node.js. These streams are used for reading input from the user and writing output to the console,
respectively. */
import { stdin as input, stdout as output } from 'node:process';
import { CallbackManager } from 'langchain/callbacks';
import { ChatPromptTemplate, HumanMessagePromptTemplate, SystemMessagePromptTemplate } from 'langchain/prompts';
import { LLMChain } from 'langchain/chains';
import { oneLine } from 'common-tags';
import chalk from 'chalk';
import logChat from './chatLogger.js';
import createCommandHandler from './commands.js';
import { getMemoryVectorStore, addDocumentsToMemoryVectorStore, getBufferWindowMemory } from './lib/memoryManager.js';
import { getContextVectorStore } from './lib/contextManager.js';
import { getRelevantContext } from './lib/vectorStoreUtils.js';
import sanitizeInput from './utils/sanitizeInput.js';
import { getConfig, getProjectRoot } from './config/index.js';
const projectRootDir = getProjectRoot();
dotenv.config();
// Set up the chat log directory
const chatLogDirectory = path.join(projectRootDir, 'chat_logs');
// Get the prompt template
const systemPromptTemplate = fs.readFileSync(path.join(projectRootDir, 'src/prompt.txt'), 'utf8');
// Set up the readline interface to read input from the user and write output to the console
const rl = readline.createInterface({ input, output });
// Set up CLI commands
const commandHandler: CommandHandler = createCommandHandler();
const callbackManager = CallbackManager.fromHandlers({
// This function is called when the LLM generates a new token (i.e., a prediction for the next word)
async handleLLMNewToken(token: string) {
// Write the token to the output stream (i.e., the console)
output.write(token);
},
});
const llm = new OpenAIChat({
streaming: true,
callbackManager,
modelName: process.env.MODEL || 'gpt-3.5-turbo',
});
const systemPrompt = SystemMessagePromptTemplate.fromTemplate(oneLine`
${systemPromptTemplate}
`);
const chatPrompt = ChatPromptTemplate.fromPromptMessages([
systemPrompt,
HumanMessagePromptTemplate.fromTemplate('QUESTION: """{input}"""'),
]);
const windowMemory = getBufferWindowMemory();
const chain = new LLMChain({
prompt: chatPrompt,
memory: windowMemory,
llm,
});
// eslint-disable-next-line no-constant-condition
while (true) {
output.write(chalk.green('\nStart chatting or type /help for a list of commands\n'));
const userInput = await rl.question('> ');
let response;
if (userInput.startsWith('/')) {
const [command, ...args] = userInput.slice(1).split(' ');
await commandHandler.execute(command, args, output);
} else {
const memoryVectorStore = await getMemoryVectorStore();
const contextVectorStore = await getContextVectorStore();
const question = sanitizeInput(userInput);
const config = getConfig();
const context = | await getRelevantContext(contextVectorStore, question, config.numContextDocumentsToRetrieve); |
const history = await getRelevantContext(memoryVectorStore, question, config.numMemoryDocumentsToRetrieve);
try {
response = await chain.call({
input: question,
context,
history,
immediate_history: config.useWindowMemory ? windowMemory : '',
});
if (response) {
await addDocumentsToMemoryVectorStore([
{ content: question, metadataType: 'question' },
{ content: response.text, metadataType: 'answer' },
]);
await logChat(chatLogDirectory, question, response.response);
}
} catch (error) {
if (error instanceof Error && error.message.includes('Cancel:')) {
// TODO: Handle cancel
} else if (error instanceof Error) {
output.write(chalk.red(error.message));
} else {
output.write(chalk.red(error));
}
}
}
output.write('\n');
}
| src/index.ts | gmickel-memorybot-bad0302 | [
{
"filename": "src/commands/setContextConfigCommand.ts",
"retrieved_chunk": " if (!args || args.length !== 1) {\n output.write(chalk.red('Invalid number of arguments. Usage: /context-config `number of documents`\\n'));\n return;\n }\n const numContextDocumentsToRetrieve = parseInt(args[0], 10);\n setNumContextDocumentsToRetrieve(numContextDocumentsToRetrieve);\n const config = getConfig();\n output.write(chalk.blue(`Number of context documents to retrieve set to ${config.numContextDocumentsToRetrieve}`));\n }\n);",
"score": 33.17879584336979
},
{
"filename": "src/commands.ts",
"retrieved_chunk": " getCommands,\n async execute(commandName: string, args: string[], output: NodeJS.WriteStream) {\n const command = commands.find((cmd) => cmd.name === commandName || cmd.aliases.includes(commandName));\n if (command) {\n await command.execute(args, output, commandHandler);\n } else {\n output.write(chalk.red('Unknown command. Type /help to see the list of available commands.\\n'));\n }\n },\n };",
"score": 29.590925921345477
},
{
"filename": "src/commands/setMemoryConfigCommand.ts",
"retrieved_chunk": " if (!args || args.length !== 1) {\n output.write(chalk.red('Invalid number of arguments. Usage: /memory-config `number of documents`\\n'));\n return;\n }\n const numMemoryDocumentsToRetrieve = parseInt(args[0], 10);\n setNumMemoryDocumentsToRetrieve(numMemoryDocumentsToRetrieve);\n const config = getConfig();\n output.write(chalk.blue(`Number of memory documents to retrieve set to ${config.numMemoryDocumentsToRetrieve}`));\n }\n);",
"score": 25.662486547403486
},
{
"filename": "src/commands/setContextConfigCommand.ts",
"retrieved_chunk": "import chalk from 'chalk';\nimport createCommand from './command.js';\nimport { setNumContextDocumentsToRetrieve, getConfig } from '../config/index.js';\nconst setContextConfigCommand = createCommand(\n 'context-config',\n ['cc'],\n `Sets the number of relevant documents to return from the context vector store.\\n\n Arguments: \\`number of documents\\` (Default: 6)\\n\n Example: \\`/context-config 10\\``,\n async (args, output) => {",
"score": 23.10302206564482
},
{
"filename": "src/commands/switchContextStoreCommand.ts",
"retrieved_chunk": " if (!args || args.length !== 1) {\n output.write(chalk.red('Invalid number of arguments. Usage: /change-context-store `subdirectory`\\n'));\n return;\n }\n const subDirectory = args[0];\n await loadOrCreateEmptyVectorStore(subDirectory);\n }\n);\nexport default changeContextStoreCommand;",
"score": 22.462481831630377
}
] | typescript | await getRelevantContext(contextVectorStore, question, config.numContextDocumentsToRetrieve); |
/* eslint-disable no-await-in-loop */
import dotenv from 'dotenv';
import { OpenAIChat } from 'langchain/llms/openai';
// eslint-disable-next-line import/no-unresolved
import * as readline from 'node:readline/promises';
import path from 'path';
import fs from 'fs';
/* This line of code is importing the `stdin` and `stdout` streams from the `process` module in
Node.js. These streams are used for reading input from the user and writing output to the console,
respectively. */
import { stdin as input, stdout as output } from 'node:process';
import { CallbackManager } from 'langchain/callbacks';
import { ChatPromptTemplate, HumanMessagePromptTemplate, SystemMessagePromptTemplate } from 'langchain/prompts';
import { LLMChain } from 'langchain/chains';
import { oneLine } from 'common-tags';
import chalk from 'chalk';
import logChat from './chatLogger.js';
import createCommandHandler from './commands.js';
import { getMemoryVectorStore, addDocumentsToMemoryVectorStore, getBufferWindowMemory } from './lib/memoryManager.js';
import { getContextVectorStore } from './lib/contextManager.js';
import { getRelevantContext } from './lib/vectorStoreUtils.js';
import sanitizeInput from './utils/sanitizeInput.js';
import { getConfig, getProjectRoot } from './config/index.js';
const projectRootDir = getProjectRoot();
dotenv.config();
// Set up the chat log directory
const chatLogDirectory = path.join(projectRootDir, 'chat_logs');
// Get the prompt template
const systemPromptTemplate = fs.readFileSync(path.join(projectRootDir, 'src/prompt.txt'), 'utf8');
// Set up the readline interface to read input from the user and write output to the console
const rl = readline.createInterface({ input, output });
// Set up CLI commands
const commandHandler: CommandHandler = createCommandHandler();
const callbackManager = CallbackManager.fromHandlers({
// This function is called when the LLM generates a new token (i.e., a prediction for the next word)
async handleLLMNewToken(token: string) {
// Write the token to the output stream (i.e., the console)
output.write(token);
},
});
const llm = new OpenAIChat({
streaming: true,
callbackManager,
modelName: process.env.MODEL || 'gpt-3.5-turbo',
});
const systemPrompt = SystemMessagePromptTemplate.fromTemplate(oneLine`
${systemPromptTemplate}
`);
const chatPrompt = ChatPromptTemplate.fromPromptMessages([
systemPrompt,
HumanMessagePromptTemplate.fromTemplate('QUESTION: """{input}"""'),
]);
const windowMemory = getBufferWindowMemory();
const chain = new LLMChain({
prompt: chatPrompt,
memory: windowMemory,
llm,
});
// eslint-disable-next-line no-constant-condition
while (true) {
output.write(chalk.green('\nStart chatting or type /help for a list of commands\n'));
const userInput = await rl.question('> ');
let response;
if (userInput.startsWith('/')) {
const [command, ...args] = userInput.slice(1).split(' ');
await commandHandler.execute(command, args, output);
} else {
const memoryVectorStore = await getMemoryVectorStore();
const contextVectorStore = await getContextVectorStore();
const question = sanitizeInput(userInput);
const config = getConfig();
const context = await getRelevantContext(contextVectorStore, question, config.numContextDocumentsToRetrieve);
const history = await getRelevantContext(memoryVectorStore, question, config.numMemoryDocumentsToRetrieve);
try {
response = await chain.call({
input: question,
context,
history,
immediate_history: config.useWindowMemory ? windowMemory : '',
});
if (response) {
await addDocumentsToMemoryVectorStore([
{ content: question, metadataType: 'question' },
{ content: response.text, metadataType: 'answer' },
]);
await | logChat(chatLogDirectory, question, response.response); |
}
} catch (error) {
if (error instanceof Error && error.message.includes('Cancel:')) {
// TODO: Handle cancel
} else if (error instanceof Error) {
output.write(chalk.red(error.message));
} else {
output.write(chalk.red(error));
}
}
}
output.write('\n');
}
| src/index.ts | gmickel-memorybot-bad0302 | [
{
"filename": "src/lib/memoryManager.ts",
"retrieved_chunk": " documents: Array<{ content: string; metadataType: string }>\n): Promise<void> {\n const formattedDocuments = documents.map(\n (doc) => new Document({ pageContent: doc.content, metadata: { type: doc.metadataType } })\n );\n await memoryWrapper.vectorStoreInstance.addDocuments(formattedDocuments);\n await saveMemoryVectorStore();\n}\nfunction resetBufferWindowMemory() {\n bufferWindowMemory.clear();",
"score": 26.12784570219595
},
{
"filename": "src/chatLogger.ts",
"retrieved_chunk": "const getLogFilename = (): string => {\n const currentDate = new Date();\n const year = currentDate.getFullYear();\n const month = String(currentDate.getMonth() + 1).padStart(2, '0');\n const day = String(currentDate.getDate()).padStart(2, '0');\n return `${year}-${month}-${day}.json`;\n};\nconst logChat = async (logDirectory: string, question: string, answer: string): Promise<void> => {\n const timestamp = new Date().toISOString();\n const chatHistory: ChatHistory = { timestamp, question, answer };",
"score": 20.768118191078635
},
{
"filename": "src/lib/crawler.ts",
"retrieved_chunk": " userAgent: 'node-crawler',\n });\n }\n /* `handleRequest` is a method that handles the response of a web page request made by the `crawler`\nobject. It takes in three parameters: `error`, `res`, and `done`. */\n handleRequest = (error: Error | null, res: CrawlerRequestResponse, done: () => void) => {\n if (error) {\n stderr.write(error.message);\n done();\n return;",
"score": 16.950446248300423
},
{
"filename": "src/chatLogger.ts",
"retrieved_chunk": "import fs from 'fs-extra';\nimport path from 'path';\ninterface ChatHistory {\n timestamp: string;\n question: string;\n answer: string;\n}\nconst ensureLogDirectory = (logDirectory: string): void => {\n fs.ensureDirSync(logDirectory);\n};",
"score": 16.61447602860862
},
{
"filename": "src/lib/vectorStoreUtils.ts",
"retrieved_chunk": "import { HNSWLib } from 'langchain/vectorstores/hnswlib';\n/**\n * Retrieves relevant context for the given question by performing a similarity search on the provided vector store.\n * @param {HNSWLib} vectorStore - HNSWLib is a library for approximate nearest neighbor search, used to\n * search for similar vectors in a high-dimensional space.\n * @param {string} sanitizedQuestion - The sanitized version of the question that needs to be answered.\n * It is a string input.\n * @param {number} numDocuments - The `numDocuments` parameter is the number of documents that the\n * `getRelevantContext` function should retrieve from the `vectorStore` based on their similarity to\n * the `sanitizedQuestion`.",
"score": 10.158214731155555
}
] | typescript | logChat(chatLogDirectory, question, response.response); |
/* eslint-disable no-await-in-loop */
import dotenv from 'dotenv';
import { OpenAIChat } from 'langchain/llms/openai';
// eslint-disable-next-line import/no-unresolved
import * as readline from 'node:readline/promises';
import path from 'path';
import fs from 'fs';
/* This line of code is importing the `stdin` and `stdout` streams from the `process` module in
Node.js. These streams are used for reading input from the user and writing output to the console,
respectively. */
import { stdin as input, stdout as output } from 'node:process';
import { CallbackManager } from 'langchain/callbacks';
import { ChatPromptTemplate, HumanMessagePromptTemplate, SystemMessagePromptTemplate } from 'langchain/prompts';
import { LLMChain } from 'langchain/chains';
import { oneLine } from 'common-tags';
import chalk from 'chalk';
import logChat from './chatLogger.js';
import createCommandHandler from './commands.js';
import { getMemoryVectorStore, addDocumentsToMemoryVectorStore, getBufferWindowMemory } from './lib/memoryManager.js';
import { getContextVectorStore } from './lib/contextManager.js';
import { getRelevantContext } from './lib/vectorStoreUtils.js';
import sanitizeInput from './utils/sanitizeInput.js';
import { getConfig, getProjectRoot } from './config/index.js';
const projectRootDir = getProjectRoot();
dotenv.config();
// Set up the chat log directory
const chatLogDirectory = path.join(projectRootDir, 'chat_logs');
// Get the prompt template
const systemPromptTemplate = fs.readFileSync(path.join(projectRootDir, 'src/prompt.txt'), 'utf8');
// Set up the readline interface to read input from the user and write output to the console
const rl = readline.createInterface({ input, output });
// Set up CLI commands
const commandHandler: CommandHandler = createCommandHandler();
const callbackManager = CallbackManager.fromHandlers({
// This function is called when the LLM generates a new token (i.e., a prediction for the next word)
async handleLLMNewToken(token: string) {
// Write the token to the output stream (i.e., the console)
output.write(token);
},
});
const llm = new OpenAIChat({
streaming: true,
callbackManager,
modelName: process.env.MODEL || 'gpt-3.5-turbo',
});
const systemPrompt = SystemMessagePromptTemplate.fromTemplate(oneLine`
${systemPromptTemplate}
`);
const chatPrompt = ChatPromptTemplate.fromPromptMessages([
systemPrompt,
HumanMessagePromptTemplate.fromTemplate('QUESTION: """{input}"""'),
]);
| const windowMemory = getBufferWindowMemory(); |
const chain = new LLMChain({
prompt: chatPrompt,
memory: windowMemory,
llm,
});
// eslint-disable-next-line no-constant-condition
while (true) {
output.write(chalk.green('\nStart chatting or type /help for a list of commands\n'));
const userInput = await rl.question('> ');
let response;
if (userInput.startsWith('/')) {
const [command, ...args] = userInput.slice(1).split(' ');
await commandHandler.execute(command, args, output);
} else {
const memoryVectorStore = await getMemoryVectorStore();
const contextVectorStore = await getContextVectorStore();
const question = sanitizeInput(userInput);
const config = getConfig();
const context = await getRelevantContext(contextVectorStore, question, config.numContextDocumentsToRetrieve);
const history = await getRelevantContext(memoryVectorStore, question, config.numMemoryDocumentsToRetrieve);
try {
response = await chain.call({
input: question,
context,
history,
immediate_history: config.useWindowMemory ? windowMemory : '',
});
if (response) {
await addDocumentsToMemoryVectorStore([
{ content: question, metadataType: 'question' },
{ content: response.text, metadataType: 'answer' },
]);
await logChat(chatLogDirectory, question, response.response);
}
} catch (error) {
if (error instanceof Error && error.message.includes('Cancel:')) {
// TODO: Handle cancel
} else if (error instanceof Error) {
output.write(chalk.red(error.message));
} else {
output.write(chalk.red(error));
}
}
}
output.write('\n');
}
| src/index.ts | gmickel-memorybot-bad0302 | [
{
"filename": "src/lib/contextManager.ts",
"retrieved_chunk": " const dbDirectory = getConfig().currentVectorStoreDatabasePath;\n try {\n vectorStore = await HNSWLib.load(dbDirectory, new OpenAIEmbeddings({ maxConcurrency: 5 }));\n } catch {\n spinner = ora({\n ...defaultOraOptions,\n text: chalk.blue(`Creating new Context Vector Store in the ${dbDirectory} directory`),\n }).start();\n const docsDirectory = path.join(projectRootDir, process.env.DOCS_DIR || 'docs');\n const filesToAdd = await getDirectoryFiles(docsDirectory);",
"score": 11.939696492442655
},
{
"filename": "src/lib/contextManager.ts",
"retrieved_chunk": " let vectorStore: HNSWLib;\n let spinner;\n const newContextVectorStorePath = path.join(projectRootDir, process.env.VECTOR_STORE_BASE_DIR || 'db', subDirectory);\n await createDirectory(newContextVectorStorePath);\n setCurrentVectorStoreDatabasePath(newContextVectorStorePath);\n const dbDirectory = getConfig().currentVectorStoreDatabasePath;\n try {\n vectorStore = await HNSWLib.load(dbDirectory, new OpenAIEmbeddings({ maxConcurrency: 5 }));\n output.write(chalk.blue(`Using Context Vector Store in the ${dbDirectory} directory\\n`));\n } catch {",
"score": 10.963325930905391
},
{
"filename": "src/commands/addURLCommand.ts",
"retrieved_chunk": " const url = args[0];\n const selector = args[1];\n const maxLinks = parseInt(args[2], 10) || 20;\n const minChars = parseInt(args[3], 10) || 200;\n await addURL(url, selector, maxLinks, minChars);\n }\n);\nexport default addURLCommand;",
"score": 9.840251129636112
},
{
"filename": "src/config/index.ts",
"retrieved_chunk": " return {\n text: 'Loading',\n stream: output,\n discardStdin: false,\n };\n}\nconst defaultConfig: Config = {\n currentVectorStoreDatabasePath: path.join(getProjectRoot(), process.env.VECTOR_STORE_DIR || 'db/default'),\n numContextDocumentsToRetrieve: 6,\n numMemoryDocumentsToRetrieve: 4,",
"score": 9.412104732460097
},
{
"filename": "src/lib/memoryManager.ts",
"retrieved_chunk": "}\nconst bufferWindowMemory = new BufferWindowMemory({\n returnMessages: false,\n memoryKey: 'immediate_history',\n inputKey: 'input',\n k: 2,\n});\nconst memoryWrapper = {\n vectorStoreInstance: memoryVectorStore,\n};",
"score": 9.028070238804792
}
] | typescript | const windowMemory = getBufferWindowMemory(); |
import {
type ISqlTypeFactory,
type ISqlTypeFactoryWithLength,
type ISqlTypeFactoryWithNoParams,
type ISqlTypeFactoryWithPrecisionScale,
type ISqlTypeFactoryWithScale,
type ISqlTypeFactoryWithTvpType,
type ISqlTypeWithLength,
type ISqlTypeWithNoParams,
type ISqlTypeWithPrecisionScale,
type ISqlTypeWithScale,
type ISqlTypeWithTvpType,
TYPES,
MAX,
} from 'mssql';
import type { StoredProcedureParameter } from '../types';
type TypeFactory<T> = T extends ISqlTypeFactoryWithNoParams
? () => ISqlTypeWithNoParams
: T extends ISqlTypeFactoryWithLength
? (length?: number) => ISqlTypeWithLength
: T extends ISqlTypeFactoryWithScale
? (scale?: number) => ISqlTypeWithScale
: T extends ISqlTypeFactoryWithPrecisionScale
? (precision?: number, scale?: number) => ISqlTypeWithPrecisionScale
: T extends ISqlTypeFactoryWithTvpType
? (tvpType?: unknown) => ISqlTypeWithTvpType
: never;
type TypesType = typeof TYPES;
type TypesKey = keyof TypesType;
type IndexableTypes = {
[K in TypesKey]: TypeFactory<TypesType[K]>;
};
function isSqlTypeFactoryWithNoParams(
factoryObject: ISqlTypeFactory,
): factoryObject is ISqlTypeFactoryWithNoParams {
return (
factoryObject !== undefined &&
!('length' in factoryObject) &&
!('scale' in factoryObject) &&
!('precision' in factoryObject) &&
!('tvpType' in factoryObject)
);
}
function isSqlTypeFactoryWithLength(
factoryObject: ISqlTypeFactory,
): factoryObject is ISqlTypeFactoryWithLength {
return factoryObject !== undefined && 'length' in factoryObject;
}
function isSqlTypeFactoryWithScale(
factoryObject: ISqlTypeFactory,
): factoryObject is ISqlTypeFactoryWithScale {
return factoryObject !== undefined && 'scale' in factoryObject;
}
function isSqlTypeFactoryWithPrecisionScale(
factoryObject: ISqlTypeFactory,
): factoryObject is ISqlTypeFactoryWithPrecisionScale {
return factoryObject !== undefined && 'precision' in factoryObject && 'scale' in factoryObject;
}
function isSqlTypeFactoryWithTvpType(
factoryObject: ISqlTypeFactory,
): factoryObject is ISqlTypeFactoryWithTvpType {
return factoryObject !== undefined && 'tvpType' in factoryObject;
}
const findPropertyCaseInsensitive = (obj: object, propertyName: string): string | null => {
const lowercasePropertyName = propertyName.toLowerCase();
for (const key in obj) {
if (
Object.prototype.hasOwnProperty.call(obj, key) &&
key.toLowerCase() === lowercasePropertyName
) {
return key;
}
}
return null;
};
export const mapDbTypeToDriverType = ({
type,
length,
precision,
scale,
| }: Pick<StoredProcedureParameter, 'type' | 'length' | 'precision' | 'scale'>): ISqlTypeFactory => { |
const types: IndexableTypes = TYPES;
const property = findPropertyCaseInsensitive(types, type);
if (property !== null) {
const typeFactory = types[property as TypesKey];
if (isSqlTypeFactoryWithNoParams(typeFactory)) {
return typeFactory();
} else if (isSqlTypeFactoryWithLength(typeFactory)) {
return (typeFactory as ISqlTypeFactoryWithLength)(length === -1 ? MAX : length);
} else if (isSqlTypeFactoryWithScale(typeFactory)) {
return (typeFactory as ISqlTypeFactoryWithScale)(scale);
} else if (isSqlTypeFactoryWithPrecisionScale(typeFactory)) {
return (typeFactory as ISqlTypeFactoryWithPrecisionScale)(precision, scale);
} else if (isSqlTypeFactoryWithTvpType(typeFactory)) {
return TYPES.NVarChar();
} else {
throw new Error(`Unknown SQL Type ${type}.`);
}
}
return TYPES.NVarChar();
};
type SqlValue = string | number | boolean | Date | Buffer;
const isStringOrNumber = (value: SqlValue): value is string | number => {
return typeof value === 'string' || typeof value === 'number';
};
const isDate = (value: SqlValue): value is Date => {
return value instanceof Date;
};
const isType = (sqlType: string, typePrefixes: string[]): boolean => {
return typePrefixes.some((prefix) => sqlType.startsWith(prefix));
};
export const convertSqlValueToJsValue = (value: SqlValue, sqlType: string): unknown => {
if (value === 'NULL') {
return null;
}
const lowerCaseSqlType = sqlType.toLowerCase();
if (
isType(lowerCaseSqlType, [
'varchar',
'nvarchar',
'char',
'nchar',
'text',
'ntext',
'xml',
'uniqueidentifier',
])
) {
return String(value);
}
if (
isType(lowerCaseSqlType, [
'int',
'smallint',
'tinyint',
'bigint',
'decimal',
'numeric',
'float',
'real',
'money',
'smallmoney',
])
) {
return Number(value);
}
if (isType(lowerCaseSqlType, ['bit'])) {
return Boolean(value);
}
if (isType(lowerCaseSqlType, ['date', 'datetime', 'datetime2', 'smalldatetime', 'time'])) {
if (isStringOrNumber(value) || isDate(value)) {
return new Date(value);
}
throw new Error('Cannot create a Date from a boolean value.');
}
if (isType(lowerCaseSqlType, ['binary', 'varbinary', 'image'])) {
return Buffer.from(value as Buffer);
}
if (isType(lowerCaseSqlType, ['rowversion', 'timestamp'])) {
return Buffer.from(value as Buffer);
}
if (isType(lowerCaseSqlType, ['hierarchyid', 'geometry', 'geography'])) {
return value;
}
if (isType(lowerCaseSqlType, ['tvp'])) {
throw new Error('TVPs are not supported.');
}
if (isType(lowerCaseSqlType, ['udt'])) {
throw new Error('UDTs are not supported.');
}
throw new Error(`Unsupported SQL type: ${sqlType}`);
};
| src/lib/utils/type-map.ts | Falven-mssql-data-source-bca6621 | [
{
"filename": "src/lib/types/i-stored-procedure-parameter.ts",
"retrieved_chunk": " */\nexport interface StoredProcedureParameter {\n name: string;\n type: string;\n mode: ParameterMode;\n defaultValue?: unknown;\n length?: number;\n precision?: number;\n scale?: number;\n}",
"score": 29.807992864678372
},
{
"filename": "src/lib/stored-procedure/stored-procedure-manager.ts",
"retrieved_chunk": " for (const spParameter of storedProcedureParameters) {\n const { name, type, length, precision, scale, ...rest } = spParameter;\n const parameterName = name.slice(1);\n // Let's use the parameter name in lowercase as the lookup key.\n preparedParameters.set(parameterName.toLowerCase(), {\n name: parameterName,\n type: mapDbTypeToDriverType({\n type,\n length,\n precision,",
"score": 29.61757251133219
},
{
"filename": "src/lib/types/i-stored-procedure-parameter.ts",
"retrieved_chunk": "import type { ParameterMode } from '.';\n/**\n * Represents a subset of used metadata for an MSSQL stored procedure parameter.\n * @property {string} name - The name of the parameter. Begins with @.\n * @property {string} type - The MSSQL data type of the parameter.\n * @property {ParameterMode} mode - The MSSQL mode of the parameter. Either 'IN', 'INOUT' or 'UNKNOWN'.\n * @property {unknown} defaultValue - The default value of the parameter, if any, or undefined.\n * @property {number} length - The length of character-based parameters, or undefined.\n * @property {number} precision - The precision of floating point parameters, or undefined.\n * @property {number} scale - The scale of floating point parameters, or undefined.",
"score": 18.80691710730278
},
{
"filename": "src/lib/stored-procedure/stored-procedure-metadata-manager.ts",
"retrieved_chunk": " 'NUMERIC_PRECISION as precision, ' +\n 'NUMERIC_SCALE as scale ' +\n 'FROM INFORMATION_SCHEMA.PARAMETERS ' +\n `WHERE SPECIFIC_SCHEMA = '${schemaAndName[0]}' AND SPECIFIC_NAME = '${schemaAndName[1]}';\n SELECT OBJECT_DEFINITION(OBJECT_ID('${storedProcedureName}')) AS storedProcedureDefinition;`,\n );\n const recordSetLength = result.recordsets.length as number;\n if (recordSetLength < 1 || recordSetLength > 2) {\n throw new Error(\n `Could not retrieve stored procedure parameter schema from Database for stored procedure ${storedProcedureName}.`,",
"score": 15.747463752793871
},
{
"filename": "src/lib/stored-procedure/stored-procedure-metadata-manager.ts",
"retrieved_chunk": " const parameterSchemaMap: Map<string, StoredProcedureParameter> =\n schemaResult.recordsets[0].reduce(\n (parameterMap: Map<string, StoredProcedureParameter>, item: StoredProcedureParameter) => {\n parameterMap.set(item.name, item);\n return parameterMap;\n },\n new Map<string, StoredProcedureParameter>(),\n );\n const storedProcedureDefinition = schemaResult.recordsets[1][0].storedProcedureDefinition;\n if (storedProcedureDefinition == null) {",
"score": 9.529199215726504
}
] | typescript | }: Pick<StoredProcedureParameter, 'type' | 'length' | 'precision' | 'scale'>): ISqlTypeFactory => { |
import { describe, expect, it } from 'vitest';
import { TypeAnalyzer } from '.';
import { TYPE_KIND } from './constants';
describe('function', () => {
it('overloading', () => {
const analyzer = new TypeAnalyzer(`
const t = 1
function a<B extends 222>(): void;
function b<A>(o: A): string;
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 48 },
text: 'function a<B extends 222>(): void;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 49, end: 77 },
text: 'function b<A>(o: A): string;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
}
]);
});
it('function-generic-definition - a`<B extends ...>`()', () => {
const analyzer = new TypeAnalyzer(
`
function a<B extends 111, C extends 111>() {}
const b = <B extends 222, C extends 222>() => {};
const c = function<B extends 333, C extends 333>() {}
const d = {
a<B extends 444, C extends 444>() {}
}
`
);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 11, end: 41 },
text: '<B extends 111, C extends 111>',
kind: TYPE_KIND.FUNCTION_GENERIC_DEFINITION
},
{
range: { pos: 57, end: 87 },
text: '<B extends 222, C extends 222>',
kind: TYPE_KIND.FUNCTION_GENERIC_DEFINITION
},
{
range: { pos: 115, end: 145 },
text: '<B extends 333, C extends 333>',
kind: TYPE_KIND.FUNCTION_GENERIC_DEFINITION
},
{
range: { pos: 166, end: 196 },
text: '<B extends 444, C extends 444>',
kind: TYPE_KIND.FUNCTION_GENERIC_DEFINITION
}
]);
});
it('function-parameter - (`a: number, b: string, ...`)', () => {
const analyzer = new TypeAnalyzer(`
function a(a1: A111, a2?: A222) {}
const b = (b1: B111, b2?: B222) => {};
const c = function(c1: C111, c2?: C222) {}
const d = {
e(d1: E111, d2?: E222) {}
f: (f1: F111, f2?: F222) => {}
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 20 },
text: ': A111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 24, end: 31 },
text: '?: A222',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 49, end: 55 },
text: ': B111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 59, end: 66 },
text: '?: B222',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 96, end: 102 },
text: ': C111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 106, end: 113 },
text: '?: C222',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 136, end: 142 },
text: ': E111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 146, end: 153 },
text: '?: E222',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 166, end: 172 },
text: ': F111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 176, end: 183 },
text: '?: F222',
kind: TYPE_KIND.FUNCTION_PARAMETER
}
]);
});
it('function-return - ()`: number`', () => {
const analyzer = new TypeAnalyzer(`n
function a(): A111 {}
const b = (): B111 => {};
const c = function(): C111 {}
const d = {
d(): D111 {}
e: (): E111 => {}
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 20 },
text: ': A111',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 36, end: 42 },
text: ': B111',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 70, end: 76 },
text: ': C111',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 97, end: 103 },
text: ': D111',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 114, end: 120 },
text: ': E111',
kind: TYPE_KIND.FUNCTION_RETURN
}
]);
});
it('function-type-predicate - (a: any)`: asserts a is ...)`', () => {
const analyzer = new TypeAnalyzer(`
function a(value): asserts a is aaa {}
const b = (value): asserts b is bbb => {};
const c = function (value): asserts d is ddd {};
const d = {
e(value): asserts e is eee {},
f: (value): asserts f is fff => {}
};
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 18, end: 36 },
text: ': asserts a is aaa',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
},
{
range: { pos: 58, end: 76 },
text: ': asserts b is bbb',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
},
{
range: { pos: 111, end: 129 },
text: ': asserts d is ddd',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
},
{
range: { pos: 157, end: 175 },
text: ': asserts e is eee',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
},
{
range: { pos: 192, end: 210 },
text: ': asserts f is fff',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
}
]);
});
});
it('interface', () => {
const analyzer = new TypeAnalyzer(`
interface t {};
interface A111 {
a: number;
b: string;
c: {
e: 1
}
}`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 1, end: 15 },
text: 'interface t {}',
kind: TYPE_KIND.INTERFACE
},
{
range: { pos: 17, end: 81 },
text: 'interface A111 {\n a: number;\n b: string;\n c: {\n e: 1\n }\n}',
kind: TYPE_KIND.INTERFACE
}
]);
});
it('type alias', () => {
const analyzer = new TypeAnalyzer(`
type t = number;
type A111 = {
a: number;
} | 123 & {}`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 1, end: 17 },
text: 'type t = number;',
kind: TYPE_KIND.TYPE_ALIAS
},
{
range: { pos: 18, end: 58 },
text: 'type A111 = {\n a: number;\n} | 123 & {}',
kind: TYPE_KIND.TYPE_ALIAS
}
]);
});
it('variable type definition', () => {
const analyzer = new TypeAnalyzer(`
const a = 1;
declare const b: number, c: string;
const d: number, e: string;
const eee: null | string = ''
let fff!: string = ''
using ggg: usingAny = fn();
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 49 },
text: 'declare const b: number, c: string;',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 57, end: 65 },
text: ': number',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
},
{
range: { pos: 68, end: 76 },
text: ': string',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
},
{
range: { pos: 87, end: 102 },
text: ': null | string',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
},
{
range: { pos: 115, end: 124 },
text: '!: string',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
},
{
range: { pos: 139, end: 149 },
text: ': usingAny',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
}
]);
});
it('declare statement', () => {
const analyzer = new TypeAnalyzer(`
declare const a: number;
declare function b(): number;
declare class c {}
declare module d {}
declare namespace e {}
declare enum f {}
declare global {}
declare module 'g' {}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 1, end: 25 },
text: 'declare const a: number;',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 26, end: 55 },
text: 'declare function b(): number;',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 56, end: 74 },
text: 'declare class c {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 75, end: 94 },
text: 'declare module d {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 95, end: 117 },
text: 'declare namespace e {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 118, end: 135 },
text: 'declare enum f {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 136, end: 153 },
text: 'declare global {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 154, end: 175 },
text: "declare module 'g' {}",
kind: TYPE_KIND.DECLARE_STATEMENT
}
]);
});
it('as expression', () => {
const analyzer = new TypeAnalyzer(`
const a = 1 as number;
const b = 1 as number | string;
const c = 1 as number | string | null as 111 as 3;
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 12, end: 22 },
text: ' as number',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 35, end: 54 },
text: ' as number | string',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 67, end: 93 },
text: ' as number | string | null',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 93, end: 100 },
text: ' as 111',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 100, end: 105 },
text: ' as 3',
kind: TYPE_KIND.AS_ASSERTION
}
]);
});
it('satisfies expression', () => {
const analyzer = new TypeAnalyzer(`
const a = 1 satisfies number;
const b = 1 satisfies number | string;
const c = 1 satisfies number | string | null;
const d = () => {
return 333 satisfies any
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 12, end: 29 },
text: ' satisfies number',
kind: TYPE_KIND.SATISFIES_OPERATOR
},
{
range: { pos: 42, end: 68 },
text: ' satisfies number | string',
kind: TYPE_KIND.SATISFIES_OPERATOR
},
{
range: { pos: 81, end: 114 },
text: ' satisfies number | string | null',
kind: TYPE_KIND.SATISFIES_OPERATOR
},
{
range: { pos: 147, end: 161 },
text: ' satisfies any',
kind: TYPE_KIND.SATISFIES_OPERATOR
}
]);
});
it('satisfies & as', () => {
const analyzer = new TypeAnalyzer(`
const a = {} satisfies {} as const;
const b = {} as const satisfies {};
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
kind: TYPE_KIND.SATISFIES_OPERATOR,
range: { pos: 13, end: 26 },
text: ' satisfies {}'
},
{
kind: TYPE_KIND.AS_ASSERTION,
range: { pos: 26, end: 35 },
text: ' as const'
},
{
kind: TYPE_KIND.AS_ASSERTION,
range: { pos: 49, end: 58 },
text: ' as const'
},
{
kind: TYPE_KIND.SATISFIES_OPERATOR,
range: { pos: 58, end: 71 },
text: ' satisfies {}'
}
]);
});
it('type assertion', () => {
const analyzer = new TypeAnalyzer(`
const a =<number>1;
const b = <number | string>1;
const c = <number | string | null>1;
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 10, end: 18 },
text: '<number>',
kind: TYPE_KIND.ANGLE_BRACKETS_ASSERTION
},
{
range: { pos: 31, end: 48 },
text: '<number | string>',
kind: TYPE_KIND.ANGLE_BRACKETS_ASSERTION
},
{
range: { pos: 61, end: 85 },
text: '<number | string | null>',
kind: TYPE_KIND.ANGLE_BRACKETS_ASSERTION
}
]);
});
it('call expression', () => {
const analyzer = new TypeAnalyzer(`
b<number>();
new d<number, string>();
f<number, string, null>();
new Set<PersistListener<S>>()
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 2, end: 10 },
text: '<number>',
| kind: TYPE_KIND.FUNCTION_CALL_GENERIC
},
{ |
range: { pos: 19, end: 35 },
text: '<number, string>',
kind: TYPE_KIND.FUNCTION_CALL_GENERIC
},
{
range: { pos: 40, end: 62 },
text: '<number, string, null>',
kind: TYPE_KIND.FUNCTION_CALL_GENERIC
},
{
range: { end: 93, pos: 73 },
text: '<PersistListener<S>>',
kind: TYPE_KIND.FUNCTION_CALL_GENERIC
}
]);
});
describe('class', () => {
it('property type definition', () => {
const analyzer = new TypeAnalyzer(`
class A {
a: number;
public b: string;
protected c: {
e: 1
}
private d: () => void = () => {}
e!: boolean;
g?: string;
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 22 },
text: ': number',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { pos: 34, end: 42 },
text: ': string',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { pos: 57, end: 73 },
text: ': {\n e: 1\n }',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { pos: 85, end: 97 },
text: ': () => void',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { pos: 112, end: 122 },
text: '!: boolean',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { end: 136, pos: 127 },
text: '?: string',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
}
]);
});
it('method declaration', () => {
const analyzer = new TypeAnalyzer(`
class A {
public a(p: 1): boolean;
public a(p: 2): number;
public a(p: 1 | 2): boolean | number {
return '' as any;
}
public b(a: number): string;
protected c(b: number | 1): {
e: 1
}
protected get compileUtils(): any | 'compileUtils' {
const abc = {
getConfig: (): ReadonlyDeep<InnerCompilerConfig> => {
return getCurrentCompileConfig() as any as unknown;
},
b(): void {}
}
}
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 11, end: 37 },
text: ' public a(p: 1): boolean;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 38, end: 63 },
text: ' public a(p: 2): number;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 76, end: 83 },
text: ': 1 | 2',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 84, end: 102 },
text: ': boolean | number',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 118, end: 125 },
text: ' as any',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 131, end: 161 },
text: ' public b(a: number): string;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 162, end: 206 },
text: ' protected c(b: number | 1): {\n e: 1\n }',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 237, end: 259 },
text: ": any | 'compileUtils'",
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 299, end: 334 },
text: ': ReadonlyDeep<InnerCompilerConfig>',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 380, end: 387 },
text: ' as any',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 387, end: 398 },
text: ' as unknown',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 418, end: 424 },
text: ': void',
kind: TYPE_KIND.FUNCTION_RETURN
}
]);
});
it('constructor', () => {
const analyzer = new TypeAnalyzer(`
class A {
constructor(a: number) {}
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 26, end: 34 },
text: ': number',
kind: TYPE_KIND.FUNCTION_PARAMETER
}
]);
});
});
describe('tsx', () => {
it('generic arguments', () => {
const analyzer = new TypeAnalyzer(
`
const a = <Component<number> />
const b = <A<number, string> />
const c = <A<number, string, null> />
const d = <A
<number, string, null, 1, 2 | 3, [22]>
/>
`,
true
);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 23, end: 31 },
text: '<number>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
},
{
range: { pos: 49, end: 65 },
text: '<number, string>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
},
{
range: { pos: 83, end: 105 },
text: '<number, string, null>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
},
{
range: { pos: 128, end: 166 },
text: '<number, string, null, 1, 2 | 3, [22]>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
}
]);
});
it('integration', () => {
const analyzer = new TypeAnalyzer(
`
const a = <Component<number>
name
test={111 as any}
t2={\`...\${11 as string}\`}
{...test as object}
/>
`,
true
);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 23, end: 31 },
text: '<number>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
},
{
range: { pos: 58, end: 65 },
text: ' as any',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 85, end: 95 },
text: ' as string',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 113, end: 123 },
text: ' as object',
kind: TYPE_KIND.AS_ASSERTION
}
]);
});
});
| src/core/helpers/type-analyzer/index.test.ts | xlboy-ts-type-hidden-a749a29 | [
{
"filename": "src/core/helpers/type-analyzer/index.ts",
"retrieved_chunk": " ]);\n }\n }\n }\n private pushAnalyzedType(\n kind: AnalyzedType['kind'],\n range: [pos: number, end: number]\n ) {\n const [pos, end] = range;\n const text = this.sourceFile.text.slice(pos, end);",
"score": 15.243287024346202
},
{
"filename": "src/core/helpers/type-analyzer/index.ts",
"retrieved_chunk": " this.analyzedTypes.push({ kind, range: { pos, end }, text });\n }\n}",
"score": 14.79500541829947
},
{
"filename": "src/core/helpers/type-analyzer/index.ts",
"retrieved_chunk": " // [1]. `declare const a: number`, [2]. `: number`. remove [2]\n function clearUselessTypes(this: TypeAnalyzer) {\n const indexsToRemove = new Set<number>();\n this.analyzedTypes.forEach((type, index) => {\n if (indexsToRemove.has(index)) return;\n this.analyzedTypes.forEach((_type, _index) => {\n if (index === _index || indexsToRemove.has(_index)) return;\n if (isEqual(_type, type)) return indexsToRemove.add(index);\n if (type.range.pos >= _type.range.pos) {\n if (type.range.end < _type.range.end) indexsToRemove.add(index);",
"score": 14.619265629940006
},
{
"filename": "src/core/helpers/type-analyzer/index.ts",
"retrieved_chunk": " this.analyzedTypes.forEach(type => {\n const oldTextLength = type.text.length;\n type.text = type.text.replace(/^[\\r\\n]+/, '');\n const startLineBreakCount = oldTextLength - type.text.length;\n type.text = type.text.replace(/[\\r\\n]+$/, '');\n const endLineBreakCount = oldTextLength - startLineBreakCount - type.text.length;\n type.range.pos += startLineBreakCount;\n type.range.end -= endLineBreakCount;\n });\n }",
"score": 10.760336703066152
},
{
"filename": "src/core/editor-context.ts",
"retrieved_chunk": "import vscode from 'vscode';\nimport { type AnalyzedType, TypeAnalyzer } from './helpers/type-analyzer';\nimport { debounce, isEqual } from 'lodash-es';\nimport { GlobalState } from './global-state';\nimport { Config } from './config';\ntype FoldingRange = Record<'start' | 'end', /* lineNumber */ number>;\ninterface EditorInfo {\n code: string;\n analyzedTypes: AnalyzedType[];\n isTSX: boolean;",
"score": 9.531166376871653
}
] | typescript | kind: TYPE_KIND.FUNCTION_CALL_GENERIC
},
{ |
import {
type ISqlTypeFactory,
type ISqlTypeFactoryWithLength,
type ISqlTypeFactoryWithNoParams,
type ISqlTypeFactoryWithPrecisionScale,
type ISqlTypeFactoryWithScale,
type ISqlTypeFactoryWithTvpType,
type ISqlTypeWithLength,
type ISqlTypeWithNoParams,
type ISqlTypeWithPrecisionScale,
type ISqlTypeWithScale,
type ISqlTypeWithTvpType,
TYPES,
MAX,
} from 'mssql';
import type { StoredProcedureParameter } from '../types';
type TypeFactory<T> = T extends ISqlTypeFactoryWithNoParams
? () => ISqlTypeWithNoParams
: T extends ISqlTypeFactoryWithLength
? (length?: number) => ISqlTypeWithLength
: T extends ISqlTypeFactoryWithScale
? (scale?: number) => ISqlTypeWithScale
: T extends ISqlTypeFactoryWithPrecisionScale
? (precision?: number, scale?: number) => ISqlTypeWithPrecisionScale
: T extends ISqlTypeFactoryWithTvpType
? (tvpType?: unknown) => ISqlTypeWithTvpType
: never;
type TypesType = typeof TYPES;
type TypesKey = keyof TypesType;
type IndexableTypes = {
[K in TypesKey]: TypeFactory<TypesType[K]>;
};
function isSqlTypeFactoryWithNoParams(
factoryObject: ISqlTypeFactory,
): factoryObject is ISqlTypeFactoryWithNoParams {
return (
factoryObject !== undefined &&
!('length' in factoryObject) &&
!('scale' in factoryObject) &&
!('precision' in factoryObject) &&
!('tvpType' in factoryObject)
);
}
function isSqlTypeFactoryWithLength(
factoryObject: ISqlTypeFactory,
): factoryObject is ISqlTypeFactoryWithLength {
return factoryObject !== undefined && 'length' in factoryObject;
}
function isSqlTypeFactoryWithScale(
factoryObject: ISqlTypeFactory,
): factoryObject is ISqlTypeFactoryWithScale {
return factoryObject !== undefined && 'scale' in factoryObject;
}
function isSqlTypeFactoryWithPrecisionScale(
factoryObject: ISqlTypeFactory,
): factoryObject is ISqlTypeFactoryWithPrecisionScale {
return factoryObject !== undefined && 'precision' in factoryObject && 'scale' in factoryObject;
}
function isSqlTypeFactoryWithTvpType(
factoryObject: ISqlTypeFactory,
): factoryObject is ISqlTypeFactoryWithTvpType {
return factoryObject !== undefined && 'tvpType' in factoryObject;
}
const findPropertyCaseInsensitive = (obj: object, propertyName: string): string | null => {
const lowercasePropertyName = propertyName.toLowerCase();
for (const key in obj) {
if (
Object.prototype.hasOwnProperty.call(obj, key) &&
key.toLowerCase() === lowercasePropertyName
) {
return key;
}
}
return null;
};
export const mapDbTypeToDriverType = ({
type,
length,
precision,
scale,
}: Pick<StoredProcedureParameter, 'type' | 'length' | 'precision' | 'scale'>): ISqlTypeFactory => {
const types: IndexableTypes = TYPES;
const property = findPropertyCaseInsensitive(types, type);
if (property !== null) {
const typeFactory = types[property as TypesKey];
if (isSqlTypeFactoryWithNoParams(typeFactory)) {
return typeFactory();
} else if (isSqlTypeFactoryWithLength(typeFactory)) {
return ( | typeFactory as ISqlTypeFactoryWithLength)(length === -1 ? MAX : length); |
} else if (isSqlTypeFactoryWithScale(typeFactory)) {
return (typeFactory as ISqlTypeFactoryWithScale)(scale);
} else if (isSqlTypeFactoryWithPrecisionScale(typeFactory)) {
return (typeFactory as ISqlTypeFactoryWithPrecisionScale)(precision, scale);
} else if (isSqlTypeFactoryWithTvpType(typeFactory)) {
return TYPES.NVarChar();
} else {
throw new Error(`Unknown SQL Type ${type}.`);
}
}
return TYPES.NVarChar();
};
type SqlValue = string | number | boolean | Date | Buffer;
const isStringOrNumber = (value: SqlValue): value is string | number => {
return typeof value === 'string' || typeof value === 'number';
};
const isDate = (value: SqlValue): value is Date => {
return value instanceof Date;
};
const isType = (sqlType: string, typePrefixes: string[]): boolean => {
return typePrefixes.some((prefix) => sqlType.startsWith(prefix));
};
export const convertSqlValueToJsValue = (value: SqlValue, sqlType: string): unknown => {
if (value === 'NULL') {
return null;
}
const lowerCaseSqlType = sqlType.toLowerCase();
if (
isType(lowerCaseSqlType, [
'varchar',
'nvarchar',
'char',
'nchar',
'text',
'ntext',
'xml',
'uniqueidentifier',
])
) {
return String(value);
}
if (
isType(lowerCaseSqlType, [
'int',
'smallint',
'tinyint',
'bigint',
'decimal',
'numeric',
'float',
'real',
'money',
'smallmoney',
])
) {
return Number(value);
}
if (isType(lowerCaseSqlType, ['bit'])) {
return Boolean(value);
}
if (isType(lowerCaseSqlType, ['date', 'datetime', 'datetime2', 'smalldatetime', 'time'])) {
if (isStringOrNumber(value) || isDate(value)) {
return new Date(value);
}
throw new Error('Cannot create a Date from a boolean value.');
}
if (isType(lowerCaseSqlType, ['binary', 'varbinary', 'image'])) {
return Buffer.from(value as Buffer);
}
if (isType(lowerCaseSqlType, ['rowversion', 'timestamp'])) {
return Buffer.from(value as Buffer);
}
if (isType(lowerCaseSqlType, ['hierarchyid', 'geometry', 'geography'])) {
return value;
}
if (isType(lowerCaseSqlType, ['tvp'])) {
throw new Error('TVPs are not supported.');
}
if (isType(lowerCaseSqlType, ['udt'])) {
throw new Error('UDTs are not supported.');
}
throw new Error(`Unsupported SQL type: ${sqlType}`);
};
| src/lib/utils/type-map.ts | Falven-mssql-data-source-bca6621 | [
{
"filename": "src/lib/types/i-stored-procedure-parameter.ts",
"retrieved_chunk": "import type { ParameterMode } from '.';\n/**\n * Represents a subset of used metadata for an MSSQL stored procedure parameter.\n * @property {string} name - The name of the parameter. Begins with @.\n * @property {string} type - The MSSQL data type of the parameter.\n * @property {ParameterMode} mode - The MSSQL mode of the parameter. Either 'IN', 'INOUT' or 'UNKNOWN'.\n * @property {unknown} defaultValue - The default value of the parameter, if any, or undefined.\n * @property {number} length - The length of character-based parameters, or undefined.\n * @property {number} precision - The precision of floating point parameters, or undefined.\n * @property {number} scale - The scale of floating point parameters, or undefined.",
"score": 33.52813111372275
},
{
"filename": "src/lib/stored-procedure/stored-procedure-manager.ts",
"retrieved_chunk": " for (const spParameter of storedProcedureParameters) {\n const { name, type, length, precision, scale, ...rest } = spParameter;\n const parameterName = name.slice(1);\n // Let's use the parameter name in lowercase as the lookup key.\n preparedParameters.set(parameterName.toLowerCase(), {\n name: parameterName,\n type: mapDbTypeToDriverType({\n type,\n length,\n precision,",
"score": 31.368536268022577
},
{
"filename": "src/lib/stored-procedure/stored-procedure-metadata-manager.ts",
"retrieved_chunk": " 'NUMERIC_PRECISION as precision, ' +\n 'NUMERIC_SCALE as scale ' +\n 'FROM INFORMATION_SCHEMA.PARAMETERS ' +\n `WHERE SPECIFIC_SCHEMA = '${schemaAndName[0]}' AND SPECIFIC_NAME = '${schemaAndName[1]}';\n SELECT OBJECT_DEFINITION(OBJECT_ID('${storedProcedureName}')) AS storedProcedureDefinition;`,\n );\n const recordSetLength = result.recordsets.length as number;\n if (recordSetLength < 1 || recordSetLength > 2) {\n throw new Error(\n `Could not retrieve stored procedure parameter schema from Database for stored procedure ${storedProcedureName}.`,",
"score": 26.494826072857084
},
{
"filename": "src/lib/types/i-stored-procedure-parameter.ts",
"retrieved_chunk": " */\nexport interface StoredProcedureParameter {\n name: string;\n type: string;\n mode: ParameterMode;\n defaultValue?: unknown;\n length?: number;\n precision?: number;\n scale?: number;\n}",
"score": 23.89551884890487
},
{
"filename": "src/lib/stored-procedure/stored-procedure-metadata-manager.ts",
"retrieved_chunk": " ): Promise<IResult<StoredProcedureSchema>> {\n return await this._databaseExecutor.executeQueryRequest(async (request: Request) => {\n // Remove square bracket notation if any, and split into schema and name.\n const schemaAndName = storedProcedureName.replace(/\\[|\\]/g, '').split('.');\n const result = await request.query<StoredProcedureSchema>(\n 'SELECT ' +\n 'PARAMETER_NAME as name, ' +\n 'DATA_TYPE as type, ' +\n 'PARAMETER_MODE as mode, ' +\n 'CHARACTER_MAXIMUM_LENGTH length, ' +",
"score": 22.63663173087439
}
] | typescript | typeFactory as ISqlTypeFactoryWithLength)(length === -1 ? MAX : length); |
import { describe, expect, it } from 'vitest';
import { TypeAnalyzer } from '.';
import { TYPE_KIND } from './constants';
describe('function', () => {
it('overloading', () => {
const analyzer = new TypeAnalyzer(`
const t = 1
function a<B extends 222>(): void;
function b<A>(o: A): string;
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 48 },
text: 'function a<B extends 222>(): void;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 49, end: 77 },
text: 'function b<A>(o: A): string;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
}
]);
});
it('function-generic-definition - a`<B extends ...>`()', () => {
const analyzer = new TypeAnalyzer(
`
function a<B extends 111, C extends 111>() {}
const b = <B extends 222, C extends 222>() => {};
const c = function<B extends 333, C extends 333>() {}
const d = {
a<B extends 444, C extends 444>() {}
}
`
);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 11, end: 41 },
text: '<B extends 111, C extends 111>',
kind: TYPE_KIND.FUNCTION_GENERIC_DEFINITION
},
{
range: { pos: 57, end: 87 },
text: '<B extends 222, C extends 222>',
kind: TYPE_KIND.FUNCTION_GENERIC_DEFINITION
},
{
range: { pos: 115, end: 145 },
text: '<B extends 333, C extends 333>',
kind: TYPE_KIND.FUNCTION_GENERIC_DEFINITION
},
{
range: { pos: 166, end: 196 },
text: '<B extends 444, C extends 444>',
kind: TYPE_KIND.FUNCTION_GENERIC_DEFINITION
}
]);
});
it('function-parameter - (`a: number, b: string, ...`)', () => {
const analyzer = new TypeAnalyzer(`
function a(a1: A111, a2?: A222) {}
const b = (b1: B111, b2?: B222) => {};
const c = function(c1: C111, c2?: C222) {}
const d = {
e(d1: E111, d2?: E222) {}
f: (f1: F111, f2?: F222) => {}
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 20 },
text: ': A111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 24, end: 31 },
text: '?: A222',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 49, end: 55 },
text: ': B111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 59, end: 66 },
text: '?: B222',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 96, end: 102 },
text: ': C111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 106, end: 113 },
text: '?: C222',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 136, end: 142 },
text: ': E111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 146, end: 153 },
text: '?: E222',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 166, end: 172 },
text: ': F111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 176, end: 183 },
text: '?: F222',
kind: TYPE_KIND.FUNCTION_PARAMETER
}
]);
});
it('function-return - ()`: number`', () => {
const analyzer = new TypeAnalyzer(`n
function a(): A111 {}
const b = (): B111 => {};
const c = function(): C111 {}
const d = {
d(): D111 {}
e: (): E111 => {}
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 20 },
text: ': A111',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 36, end: 42 },
text: ': B111',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 70, end: 76 },
text: ': C111',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 97, end: 103 },
text: ': D111',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 114, end: 120 },
text: ': E111',
kind: TYPE_KIND.FUNCTION_RETURN
}
]);
});
it('function-type-predicate - (a: any)`: asserts a is ...)`', () => {
const analyzer = new TypeAnalyzer(`
function a(value): asserts a is aaa {}
const b = (value): asserts b is bbb => {};
const c = function (value): asserts d is ddd {};
const d = {
e(value): asserts e is eee {},
f: (value): asserts f is fff => {}
};
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 18, end: 36 },
text: ': asserts a is aaa',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
},
{
range: { pos: 58, end: 76 },
text: ': asserts b is bbb',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
},
{
range: { pos: 111, end: 129 },
text: ': asserts d is ddd',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
},
{
range: { pos: 157, end: 175 },
text: ': asserts e is eee',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
},
{
range: { pos: 192, end: 210 },
text: ': asserts f is fff',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
}
]);
});
});
it('interface', () => {
const analyzer = new TypeAnalyzer(`
interface t {};
interface A111 {
a: number;
b: string;
c: {
e: 1
}
}`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 1, end: 15 },
text: 'interface t {}',
kind: TYPE_KIND.INTERFACE
},
{
range: { pos: 17, end: 81 },
text: 'interface A111 {\n a: number;\n b: string;\n c: {\n e: 1\n }\n}',
kind: TYPE_KIND.INTERFACE
}
]);
});
it('type alias', () => {
const analyzer = new TypeAnalyzer(`
type t = number;
type A111 = {
a: number;
} | 123 & {}`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 1, end: 17 },
text: 'type t = number;',
kind: | TYPE_KIND.TYPE_ALIAS
},
{ |
range: { pos: 18, end: 58 },
text: 'type A111 = {\n a: number;\n} | 123 & {}',
kind: TYPE_KIND.TYPE_ALIAS
}
]);
});
it('variable type definition', () => {
const analyzer = new TypeAnalyzer(`
const a = 1;
declare const b: number, c: string;
const d: number, e: string;
const eee: null | string = ''
let fff!: string = ''
using ggg: usingAny = fn();
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 49 },
text: 'declare const b: number, c: string;',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 57, end: 65 },
text: ': number',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
},
{
range: { pos: 68, end: 76 },
text: ': string',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
},
{
range: { pos: 87, end: 102 },
text: ': null | string',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
},
{
range: { pos: 115, end: 124 },
text: '!: string',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
},
{
range: { pos: 139, end: 149 },
text: ': usingAny',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
}
]);
});
it('declare statement', () => {
const analyzer = new TypeAnalyzer(`
declare const a: number;
declare function b(): number;
declare class c {}
declare module d {}
declare namespace e {}
declare enum f {}
declare global {}
declare module 'g' {}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 1, end: 25 },
text: 'declare const a: number;',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 26, end: 55 },
text: 'declare function b(): number;',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 56, end: 74 },
text: 'declare class c {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 75, end: 94 },
text: 'declare module d {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 95, end: 117 },
text: 'declare namespace e {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 118, end: 135 },
text: 'declare enum f {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 136, end: 153 },
text: 'declare global {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 154, end: 175 },
text: "declare module 'g' {}",
kind: TYPE_KIND.DECLARE_STATEMENT
}
]);
});
it('as expression', () => {
const analyzer = new TypeAnalyzer(`
const a = 1 as number;
const b = 1 as number | string;
const c = 1 as number | string | null as 111 as 3;
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 12, end: 22 },
text: ' as number',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 35, end: 54 },
text: ' as number | string',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 67, end: 93 },
text: ' as number | string | null',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 93, end: 100 },
text: ' as 111',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 100, end: 105 },
text: ' as 3',
kind: TYPE_KIND.AS_ASSERTION
}
]);
});
it('satisfies expression', () => {
const analyzer = new TypeAnalyzer(`
const a = 1 satisfies number;
const b = 1 satisfies number | string;
const c = 1 satisfies number | string | null;
const d = () => {
return 333 satisfies any
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 12, end: 29 },
text: ' satisfies number',
kind: TYPE_KIND.SATISFIES_OPERATOR
},
{
range: { pos: 42, end: 68 },
text: ' satisfies number | string',
kind: TYPE_KIND.SATISFIES_OPERATOR
},
{
range: { pos: 81, end: 114 },
text: ' satisfies number | string | null',
kind: TYPE_KIND.SATISFIES_OPERATOR
},
{
range: { pos: 147, end: 161 },
text: ' satisfies any',
kind: TYPE_KIND.SATISFIES_OPERATOR
}
]);
});
it('satisfies & as', () => {
const analyzer = new TypeAnalyzer(`
const a = {} satisfies {} as const;
const b = {} as const satisfies {};
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
kind: TYPE_KIND.SATISFIES_OPERATOR,
range: { pos: 13, end: 26 },
text: ' satisfies {}'
},
{
kind: TYPE_KIND.AS_ASSERTION,
range: { pos: 26, end: 35 },
text: ' as const'
},
{
kind: TYPE_KIND.AS_ASSERTION,
range: { pos: 49, end: 58 },
text: ' as const'
},
{
kind: TYPE_KIND.SATISFIES_OPERATOR,
range: { pos: 58, end: 71 },
text: ' satisfies {}'
}
]);
});
it('type assertion', () => {
const analyzer = new TypeAnalyzer(`
const a =<number>1;
const b = <number | string>1;
const c = <number | string | null>1;
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 10, end: 18 },
text: '<number>',
kind: TYPE_KIND.ANGLE_BRACKETS_ASSERTION
},
{
range: { pos: 31, end: 48 },
text: '<number | string>',
kind: TYPE_KIND.ANGLE_BRACKETS_ASSERTION
},
{
range: { pos: 61, end: 85 },
text: '<number | string | null>',
kind: TYPE_KIND.ANGLE_BRACKETS_ASSERTION
}
]);
});
it('call expression', () => {
const analyzer = new TypeAnalyzer(`
b<number>();
new d<number, string>();
f<number, string, null>();
new Set<PersistListener<S>>()
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 2, end: 10 },
text: '<number>',
kind: TYPE_KIND.FUNCTION_CALL_GENERIC
},
{
range: { pos: 19, end: 35 },
text: '<number, string>',
kind: TYPE_KIND.FUNCTION_CALL_GENERIC
},
{
range: { pos: 40, end: 62 },
text: '<number, string, null>',
kind: TYPE_KIND.FUNCTION_CALL_GENERIC
},
{
range: { end: 93, pos: 73 },
text: '<PersistListener<S>>',
kind: TYPE_KIND.FUNCTION_CALL_GENERIC
}
]);
});
describe('class', () => {
it('property type definition', () => {
const analyzer = new TypeAnalyzer(`
class A {
a: number;
public b: string;
protected c: {
e: 1
}
private d: () => void = () => {}
e!: boolean;
g?: string;
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 22 },
text: ': number',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { pos: 34, end: 42 },
text: ': string',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { pos: 57, end: 73 },
text: ': {\n e: 1\n }',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { pos: 85, end: 97 },
text: ': () => void',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { pos: 112, end: 122 },
text: '!: boolean',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { end: 136, pos: 127 },
text: '?: string',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
}
]);
});
it('method declaration', () => {
const analyzer = new TypeAnalyzer(`
class A {
public a(p: 1): boolean;
public a(p: 2): number;
public a(p: 1 | 2): boolean | number {
return '' as any;
}
public b(a: number): string;
protected c(b: number | 1): {
e: 1
}
protected get compileUtils(): any | 'compileUtils' {
const abc = {
getConfig: (): ReadonlyDeep<InnerCompilerConfig> => {
return getCurrentCompileConfig() as any as unknown;
},
b(): void {}
}
}
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 11, end: 37 },
text: ' public a(p: 1): boolean;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 38, end: 63 },
text: ' public a(p: 2): number;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 76, end: 83 },
text: ': 1 | 2',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 84, end: 102 },
text: ': boolean | number',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 118, end: 125 },
text: ' as any',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 131, end: 161 },
text: ' public b(a: number): string;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 162, end: 206 },
text: ' protected c(b: number | 1): {\n e: 1\n }',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 237, end: 259 },
text: ": any | 'compileUtils'",
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 299, end: 334 },
text: ': ReadonlyDeep<InnerCompilerConfig>',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 380, end: 387 },
text: ' as any',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 387, end: 398 },
text: ' as unknown',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 418, end: 424 },
text: ': void',
kind: TYPE_KIND.FUNCTION_RETURN
}
]);
});
it('constructor', () => {
const analyzer = new TypeAnalyzer(`
class A {
constructor(a: number) {}
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 26, end: 34 },
text: ': number',
kind: TYPE_KIND.FUNCTION_PARAMETER
}
]);
});
});
describe('tsx', () => {
it('generic arguments', () => {
const analyzer = new TypeAnalyzer(
`
const a = <Component<number> />
const b = <A<number, string> />
const c = <A<number, string, null> />
const d = <A
<number, string, null, 1, 2 | 3, [22]>
/>
`,
true
);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 23, end: 31 },
text: '<number>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
},
{
range: { pos: 49, end: 65 },
text: '<number, string>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
},
{
range: { pos: 83, end: 105 },
text: '<number, string, null>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
},
{
range: { pos: 128, end: 166 },
text: '<number, string, null, 1, 2 | 3, [22]>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
}
]);
});
it('integration', () => {
const analyzer = new TypeAnalyzer(
`
const a = <Component<number>
name
test={111 as any}
t2={\`...\${11 as string}\`}
{...test as object}
/>
`,
true
);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 23, end: 31 },
text: '<number>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
},
{
range: { pos: 58, end: 65 },
text: ' as any',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 85, end: 95 },
text: ' as string',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 113, end: 123 },
text: ' as object',
kind: TYPE_KIND.AS_ASSERTION
}
]);
});
});
| src/core/helpers/type-analyzer/index.test.ts | xlboy-ts-type-hidden-a749a29 | [
{
"filename": "src/core/helpers/type-analyzer/index.ts",
"retrieved_chunk": " ]);\n }\n }\n }\n private pushAnalyzedType(\n kind: AnalyzedType['kind'],\n range: [pos: number, end: number]\n ) {\n const [pos, end] = range;\n const text = this.sourceFile.text.slice(pos, end);",
"score": 15.243287024346202
},
{
"filename": "src/core/helpers/type-analyzer/index.ts",
"retrieved_chunk": " this.analyzedTypes.push({ kind, range: { pos, end }, text });\n }\n}",
"score": 14.79500541829947
},
{
"filename": "src/core/helpers/type-analyzer/index.ts",
"retrieved_chunk": " this.analyzedTypes.forEach(type => {\n const oldTextLength = type.text.length;\n type.text = type.text.replace(/^[\\r\\n]+/, '');\n const startLineBreakCount = oldTextLength - type.text.length;\n type.text = type.text.replace(/[\\r\\n]+$/, '');\n const endLineBreakCount = oldTextLength - startLineBreakCount - type.text.length;\n type.range.pos += startLineBreakCount;\n type.range.end -= endLineBreakCount;\n });\n }",
"score": 12.828744264724813
},
{
"filename": "src/core/helpers/type-analyzer/index.ts",
"retrieved_chunk": " ) {\n const kind =\n child.kind === ts.SyntaxKind.InterfaceDeclaration\n ? TYPE_KIND.INTERFACE\n : TYPE_KIND.TYPE_ALIAS;\n this.pushAnalyzedType(kind, [child.pos, child.end]);\n }\n // context = `a: number`, curChild = `number`\n function handleParentParameter(\n this: TypeAnalyzer,",
"score": 12.520269116458632
},
{
"filename": "src/core/editor-context.ts",
"retrieved_chunk": "import vscode from 'vscode';\nimport { type AnalyzedType, TypeAnalyzer } from './helpers/type-analyzer';\nimport { debounce, isEqual } from 'lodash-es';\nimport { GlobalState } from './global-state';\nimport { Config } from './config';\ntype FoldingRange = Record<'start' | 'end', /* lineNumber */ number>;\ninterface EditorInfo {\n code: string;\n analyzedTypes: AnalyzedType[];\n isTSX: boolean;",
"score": 11.04332124511366
}
] | typescript | TYPE_KIND.TYPE_ALIAS
},
{ |
import { describe, expect, it } from 'vitest';
import { TypeAnalyzer } from '.';
import { TYPE_KIND } from './constants';
describe('function', () => {
it('overloading', () => {
const analyzer = new TypeAnalyzer(`
const t = 1
function a<B extends 222>(): void;
function b<A>(o: A): string;
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 48 },
text: 'function a<B extends 222>(): void;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 49, end: 77 },
text: 'function b<A>(o: A): string;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
}
]);
});
it('function-generic-definition - a`<B extends ...>`()', () => {
const analyzer = new TypeAnalyzer(
`
function a<B extends 111, C extends 111>() {}
const b = <B extends 222, C extends 222>() => {};
const c = function<B extends 333, C extends 333>() {}
const d = {
a<B extends 444, C extends 444>() {}
}
`
);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 11, end: 41 },
text: '<B extends 111, C extends 111>',
kind: TYPE_KIND.FUNCTION_GENERIC_DEFINITION
},
{
range: { pos: 57, end: 87 },
text: '<B extends 222, C extends 222>',
kind: TYPE_KIND.FUNCTION_GENERIC_DEFINITION
},
{
range: { pos: 115, end: 145 },
text: '<B extends 333, C extends 333>',
kind: TYPE_KIND.FUNCTION_GENERIC_DEFINITION
},
{
range: { pos: 166, end: 196 },
text: '<B extends 444, C extends 444>',
kind: TYPE_KIND.FUNCTION_GENERIC_DEFINITION
}
]);
});
it('function-parameter - (`a: number, b: string, ...`)', () => {
const analyzer = new TypeAnalyzer(`
function a(a1: A111, a2?: A222) {}
const b = (b1: B111, b2?: B222) => {};
const c = function(c1: C111, c2?: C222) {}
const d = {
e(d1: E111, d2?: E222) {}
f: (f1: F111, f2?: F222) => {}
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 20 },
text: ': A111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 24, end: 31 },
text: '?: A222',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 49, end: 55 },
text: ': B111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 59, end: 66 },
text: '?: B222',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 96, end: 102 },
text: ': C111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 106, end: 113 },
text: '?: C222',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 136, end: 142 },
text: ': E111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 146, end: 153 },
text: '?: E222',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 166, end: 172 },
text: ': F111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 176, end: 183 },
text: '?: F222',
kind: TYPE_KIND.FUNCTION_PARAMETER
}
]);
});
it('function-return - ()`: number`', () => {
const analyzer = new TypeAnalyzer(`n
function a(): A111 {}
const b = (): B111 => {};
const c = function(): C111 {}
const d = {
d(): D111 {}
e: (): E111 => {}
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 20 },
text: ': A111',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 36, end: 42 },
text: ': B111',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 70, end: 76 },
text: ': C111',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 97, end: 103 },
text: ': D111',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 114, end: 120 },
text: ': E111',
kind: TYPE_KIND.FUNCTION_RETURN
}
]);
});
it('function-type-predicate - (a: any)`: asserts a is ...)`', () => {
const analyzer = new TypeAnalyzer(`
function a(value): asserts a is aaa {}
const b = (value): asserts b is bbb => {};
const c = function (value): asserts d is ddd {};
const d = {
e(value): asserts e is eee {},
f: (value): asserts f is fff => {}
};
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 18, end: 36 },
text: ': asserts a is aaa',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
},
{
range: { pos: 58, end: 76 },
text: ': asserts b is bbb',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
},
{
range: { pos: 111, end: 129 },
text: ': asserts d is ddd',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
},
{
range: { pos: 157, end: 175 },
text: ': asserts e is eee',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
},
{
range: { pos: 192, end: 210 },
text: ': asserts f is fff',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
}
]);
});
});
it('interface', () => {
const analyzer = new TypeAnalyzer(`
interface t {};
interface A111 {
a: number;
b: string;
c: {
e: 1
}
}`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 1, end: 15 },
text: 'interface t {}',
kind: TYPE_KIND.INTERFACE
},
{
range: { pos: 17, end: 81 },
text: 'interface A111 {\n a: number;\n b: string;\n c: {\n e: 1\n }\n}',
kind: TYPE_KIND.INTERFACE
}
]);
});
it('type alias', () => {
const analyzer = new TypeAnalyzer(`
type t = number;
type A111 = {
a: number;
} | 123 & {}`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 1, end: 17 },
text: 'type t = number;',
kind: TYPE_KIND.TYPE_ALIAS
},
{
range: { pos: 18, end: 58 },
text: 'type A111 = {\n a: number;\n} | 123 & {}',
kind: TYPE_KIND.TYPE_ALIAS
}
]);
});
it('variable type definition', () => {
const analyzer = new TypeAnalyzer(`
const a = 1;
declare const b: number, c: string;
const d: number, e: string;
const eee: null | string = ''
let fff!: string = ''
using ggg: usingAny = fn();
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 49 },
text: 'declare const b: number, c: string;',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 57, end: 65 },
text: ': number',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
},
{
range: { pos: 68, end: 76 },
text: ': string',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
},
{
range: { pos: 87, end: 102 },
text: ': null | string',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
},
{
range: { pos: 115, end: 124 },
text: '!: string',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
},
{
range: { pos: 139, end: 149 },
text: ': usingAny',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
}
]);
});
it('declare statement', () => {
const analyzer = new TypeAnalyzer(`
declare const a: number;
declare function b(): number;
declare class c {}
declare module d {}
declare namespace e {}
declare enum f {}
declare global {}
declare module 'g' {}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 1, end: 25 },
text: 'declare const a: number;',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 26, end: 55 },
text: 'declare function b(): number;',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 56, end: 74 },
text: 'declare class c {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 75, end: 94 },
text: 'declare module d {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 95, end: 117 },
text: 'declare namespace e {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 118, end: 135 },
text: 'declare enum f {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 136, end: 153 },
text: 'declare global {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 154, end: 175 },
text: "declare module 'g' {}",
kind: TYPE_KIND.DECLARE_STATEMENT
}
]);
});
it('as expression', () => {
const analyzer = new TypeAnalyzer(`
const a = 1 as number;
const b = 1 as number | string;
const c = 1 as number | string | null as 111 as 3;
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 12, end: 22 },
text: ' as number',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 35, end: 54 },
text: ' as number | string',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 67, end: 93 },
text: ' as number | string | null',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 93, end: 100 },
text: ' as 111',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 100, end: 105 },
text: ' as 3',
kind: TYPE_KIND.AS_ASSERTION
}
]);
});
it('satisfies expression', () => {
const analyzer = new TypeAnalyzer(`
const a = 1 satisfies number;
const b = 1 satisfies number | string;
const c = 1 satisfies number | string | null;
const d = () => {
return 333 satisfies any
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 12, end: 29 },
text: ' satisfies number',
kind: | TYPE_KIND.SATISFIES_OPERATOR
},
{ |
range: { pos: 42, end: 68 },
text: ' satisfies number | string',
kind: TYPE_KIND.SATISFIES_OPERATOR
},
{
range: { pos: 81, end: 114 },
text: ' satisfies number | string | null',
kind: TYPE_KIND.SATISFIES_OPERATOR
},
{
range: { pos: 147, end: 161 },
text: ' satisfies any',
kind: TYPE_KIND.SATISFIES_OPERATOR
}
]);
});
it('satisfies & as', () => {
const analyzer = new TypeAnalyzer(`
const a = {} satisfies {} as const;
const b = {} as const satisfies {};
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
kind: TYPE_KIND.SATISFIES_OPERATOR,
range: { pos: 13, end: 26 },
text: ' satisfies {}'
},
{
kind: TYPE_KIND.AS_ASSERTION,
range: { pos: 26, end: 35 },
text: ' as const'
},
{
kind: TYPE_KIND.AS_ASSERTION,
range: { pos: 49, end: 58 },
text: ' as const'
},
{
kind: TYPE_KIND.SATISFIES_OPERATOR,
range: { pos: 58, end: 71 },
text: ' satisfies {}'
}
]);
});
it('type assertion', () => {
const analyzer = new TypeAnalyzer(`
const a =<number>1;
const b = <number | string>1;
const c = <number | string | null>1;
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 10, end: 18 },
text: '<number>',
kind: TYPE_KIND.ANGLE_BRACKETS_ASSERTION
},
{
range: { pos: 31, end: 48 },
text: '<number | string>',
kind: TYPE_KIND.ANGLE_BRACKETS_ASSERTION
},
{
range: { pos: 61, end: 85 },
text: '<number | string | null>',
kind: TYPE_KIND.ANGLE_BRACKETS_ASSERTION
}
]);
});
it('call expression', () => {
const analyzer = new TypeAnalyzer(`
b<number>();
new d<number, string>();
f<number, string, null>();
new Set<PersistListener<S>>()
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 2, end: 10 },
text: '<number>',
kind: TYPE_KIND.FUNCTION_CALL_GENERIC
},
{
range: { pos: 19, end: 35 },
text: '<number, string>',
kind: TYPE_KIND.FUNCTION_CALL_GENERIC
},
{
range: { pos: 40, end: 62 },
text: '<number, string, null>',
kind: TYPE_KIND.FUNCTION_CALL_GENERIC
},
{
range: { end: 93, pos: 73 },
text: '<PersistListener<S>>',
kind: TYPE_KIND.FUNCTION_CALL_GENERIC
}
]);
});
describe('class', () => {
it('property type definition', () => {
const analyzer = new TypeAnalyzer(`
class A {
a: number;
public b: string;
protected c: {
e: 1
}
private d: () => void = () => {}
e!: boolean;
g?: string;
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 22 },
text: ': number',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { pos: 34, end: 42 },
text: ': string',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { pos: 57, end: 73 },
text: ': {\n e: 1\n }',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { pos: 85, end: 97 },
text: ': () => void',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { pos: 112, end: 122 },
text: '!: boolean',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { end: 136, pos: 127 },
text: '?: string',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
}
]);
});
it('method declaration', () => {
const analyzer = new TypeAnalyzer(`
class A {
public a(p: 1): boolean;
public a(p: 2): number;
public a(p: 1 | 2): boolean | number {
return '' as any;
}
public b(a: number): string;
protected c(b: number | 1): {
e: 1
}
protected get compileUtils(): any | 'compileUtils' {
const abc = {
getConfig: (): ReadonlyDeep<InnerCompilerConfig> => {
return getCurrentCompileConfig() as any as unknown;
},
b(): void {}
}
}
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 11, end: 37 },
text: ' public a(p: 1): boolean;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 38, end: 63 },
text: ' public a(p: 2): number;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 76, end: 83 },
text: ': 1 | 2',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 84, end: 102 },
text: ': boolean | number',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 118, end: 125 },
text: ' as any',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 131, end: 161 },
text: ' public b(a: number): string;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 162, end: 206 },
text: ' protected c(b: number | 1): {\n e: 1\n }',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 237, end: 259 },
text: ": any | 'compileUtils'",
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 299, end: 334 },
text: ': ReadonlyDeep<InnerCompilerConfig>',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 380, end: 387 },
text: ' as any',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 387, end: 398 },
text: ' as unknown',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 418, end: 424 },
text: ': void',
kind: TYPE_KIND.FUNCTION_RETURN
}
]);
});
it('constructor', () => {
const analyzer = new TypeAnalyzer(`
class A {
constructor(a: number) {}
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 26, end: 34 },
text: ': number',
kind: TYPE_KIND.FUNCTION_PARAMETER
}
]);
});
});
describe('tsx', () => {
it('generic arguments', () => {
const analyzer = new TypeAnalyzer(
`
const a = <Component<number> />
const b = <A<number, string> />
const c = <A<number, string, null> />
const d = <A
<number, string, null, 1, 2 | 3, [22]>
/>
`,
true
);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 23, end: 31 },
text: '<number>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
},
{
range: { pos: 49, end: 65 },
text: '<number, string>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
},
{
range: { pos: 83, end: 105 },
text: '<number, string, null>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
},
{
range: { pos: 128, end: 166 },
text: '<number, string, null, 1, 2 | 3, [22]>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
}
]);
});
it('integration', () => {
const analyzer = new TypeAnalyzer(
`
const a = <Component<number>
name
test={111 as any}
t2={\`...\${11 as string}\`}
{...test as object}
/>
`,
true
);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 23, end: 31 },
text: '<number>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
},
{
range: { pos: 58, end: 65 },
text: ' as any',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 85, end: 95 },
text: ' as string',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 113, end: 123 },
text: ' as object',
kind: TYPE_KIND.AS_ASSERTION
}
]);
});
});
| src/core/helpers/type-analyzer/index.test.ts | xlboy-ts-type-hidden-a749a29 | [
{
"filename": "src/core/helpers/type-analyzer/index.ts",
"retrieved_chunk": " ]);\n }\n }\n }\n private pushAnalyzedType(\n kind: AnalyzedType['kind'],\n range: [pos: number, end: number]\n ) {\n const [pos, end] = range;\n const text = this.sourceFile.text.slice(pos, end);",
"score": 15.243287024346202
},
{
"filename": "src/core/helpers/type-analyzer/index.ts",
"retrieved_chunk": " this.analyzedTypes.push({ kind, range: { pos, end }, text });\n }\n}",
"score": 14.79500541829947
},
{
"filename": "src/core/helpers/type-analyzer/constants.ts",
"retrieved_chunk": " * const user = { ... } satisfies UserModel;\n * ```\n * ⏭️ ` satisfies UserModel`\n */\n SATISFIES_OPERATOR = 'satisfies-operator',\n /**\n * ```ts\n * declare const a: number;\n * declare function b(): number;\n * declare class c {}",
"score": 12.037326847062548
},
{
"filename": "src/core/helpers/type-analyzer/index.ts",
"retrieved_chunk": " this.analyzedTypes.forEach(type => {\n const oldTextLength = type.text.length;\n type.text = type.text.replace(/^[\\r\\n]+/, '');\n const startLineBreakCount = oldTextLength - type.text.length;\n type.text = type.text.replace(/[\\r\\n]+$/, '');\n const endLineBreakCount = oldTextLength - startLineBreakCount - type.text.length;\n type.range.pos += startLineBreakCount;\n type.range.end -= endLineBreakCount;\n });\n }",
"score": 10.760336703066152
},
{
"filename": "src/core/editor-context.ts",
"retrieved_chunk": "import vscode from 'vscode';\nimport { type AnalyzedType, TypeAnalyzer } from './helpers/type-analyzer';\nimport { debounce, isEqual } from 'lodash-es';\nimport { GlobalState } from './global-state';\nimport { Config } from './config';\ntype FoldingRange = Record<'start' | 'end', /* lineNumber */ number>;\ninterface EditorInfo {\n code: string;\n analyzedTypes: AnalyzedType[];\n isTSX: boolean;",
"score": 9.531166376871653
}
] | typescript | TYPE_KIND.SATISFIES_OPERATOR
},
{ |
import {
type ISqlTypeFactory,
type ISqlTypeFactoryWithLength,
type ISqlTypeFactoryWithNoParams,
type ISqlTypeFactoryWithPrecisionScale,
type ISqlTypeFactoryWithScale,
type ISqlTypeFactoryWithTvpType,
type ISqlTypeWithLength,
type ISqlTypeWithNoParams,
type ISqlTypeWithPrecisionScale,
type ISqlTypeWithScale,
type ISqlTypeWithTvpType,
TYPES,
MAX,
} from 'mssql';
import type { StoredProcedureParameter } from '../types';
/**
 * Maps an mssql type-factory interface to the call signature that yields the
 * corresponding configured SQL type. Resolves to `never` for unknown factories.
 */
type TypeFactory<T> = T extends ISqlTypeFactoryWithNoParams
  ? () => ISqlTypeWithNoParams
  : T extends ISqlTypeFactoryWithLength
  ? (length?: number) => ISqlTypeWithLength
  : T extends ISqlTypeFactoryWithScale
  ? (scale?: number) => ISqlTypeWithScale
  : T extends ISqlTypeFactoryWithPrecisionScale
  ? (precision?: number, scale?: number) => ISqlTypeWithPrecisionScale
  : T extends ISqlTypeFactoryWithTvpType
  ? (tvpType?: unknown) => ISqlTypeWithTvpType
  : never;
// The shape of the mssql TYPES export and the union of its keys.
type TypesType = typeof TYPES;
type TypesKey = keyof TypesType;
// TYPES re-typed so each entry exposes its precise factory call signature.
type IndexableTypes = {
  [K in TypesKey]: TypeFactory<TypesType[K]>;
};
/**
 * Type guard: the factory takes no configuration parameters, i.e. none of the
 * known parameter markers (length/scale/precision/tvpType) are present on it.
 */
function isSqlTypeFactoryWithNoParams(
  factoryObject: ISqlTypeFactory,
): factoryObject is ISqlTypeFactoryWithNoParams {
  if (factoryObject === undefined) {
    return false;
  }
  const parameterMarkers = ['length', 'scale', 'precision', 'tvpType'];
  return parameterMarkers.every((marker) => !(marker in factoryObject));
}
/** Type guard: the factory is configured by a length argument. */
function isSqlTypeFactoryWithLength(
  factoryObject: ISqlTypeFactory,
): factoryObject is ISqlTypeFactoryWithLength {
  if (factoryObject === undefined) {
    return false;
  }
  return 'length' in factoryObject;
}
/** Type guard: the factory is configured by a scale argument. */
function isSqlTypeFactoryWithScale(
  factoryObject: ISqlTypeFactory,
): factoryObject is ISqlTypeFactoryWithScale {
  if (factoryObject === undefined) {
    return false;
  }
  return 'scale' in factoryObject;
}
/** Type guard: the factory is configured by precision and scale arguments. */
function isSqlTypeFactoryWithPrecisionScale(
  factoryObject: ISqlTypeFactory,
): factoryObject is ISqlTypeFactoryWithPrecisionScale {
  if (factoryObject === undefined) {
    return false;
  }
  return 'precision' in factoryObject && 'scale' in factoryObject;
}
/** Type guard: the factory is configured by a table-valued-parameter type. */
function isSqlTypeFactoryWithTvpType(
  factoryObject: ISqlTypeFactory,
): factoryObject is ISqlTypeFactoryWithTvpType {
  if (factoryObject === undefined) {
    return false;
  }
  return 'tvpType' in factoryObject;
}
/**
 * Looks up an own property of `obj` whose name matches `propertyName`
 * case-insensitively.
 * @returns The actual (original-cased) key, or null when no key matches.
 */
const findPropertyCaseInsensitive = (obj: object, propertyName: string): string | null => {
  const target = propertyName.toLowerCase();
  const match = Object.keys(obj).find((key) => key.toLowerCase() === target);
  return match ?? null;
};
export const mapDbTypeToDriverType = ({
type,
length,
precision,
scale,
}: Pick<StoredProcedureParameter, 'type' | 'length' | 'precision' | 'scale'>): ISqlTypeFactory => {
const types: IndexableTypes = TYPES;
| const property = findPropertyCaseInsensitive(types, type); |
if (property !== null) {
const typeFactory = types[property as TypesKey];
if (isSqlTypeFactoryWithNoParams(typeFactory)) {
return typeFactory();
} else if (isSqlTypeFactoryWithLength(typeFactory)) {
return (typeFactory as ISqlTypeFactoryWithLength)(length === -1 ? MAX : length);
} else if (isSqlTypeFactoryWithScale(typeFactory)) {
return (typeFactory as ISqlTypeFactoryWithScale)(scale);
} else if (isSqlTypeFactoryWithPrecisionScale(typeFactory)) {
return (typeFactory as ISqlTypeFactoryWithPrecisionScale)(precision, scale);
} else if (isSqlTypeFactoryWithTvpType(typeFactory)) {
return TYPES.NVarChar();
} else {
throw new Error(`Unknown SQL Type ${type}.`);
}
}
return TYPES.NVarChar();
};
// The scalar value shapes the mssql driver can hand back for a column.
type SqlValue = string | number | boolean | Date | Buffer;

/** Narrows a SqlValue to string | number (shapes the Date constructor accepts). */
const isStringOrNumber = (value: SqlValue): value is string | number => {
  return typeof value === 'string' || typeof value === 'number';
};

/** Narrows a SqlValue to Date. */
const isDate = (value: SqlValue): value is Date => {
  return value instanceof Date;
};

/** True when the (already lowercased) SQL type name starts with any given prefix. */
const isType = (sqlType: string, typePrefixes: string[]): boolean => {
  return typePrefixes.some((prefix) => sqlType.startsWith(prefix));
};

/**
 * Converts a raw SQL value to the corresponding JavaScript value, based on
 * the declared SQL type name of the column/parameter.
 * @param value - The raw driver value, or the literal string 'NULL'.
 * @param sqlType - The SQL type name, e.g. 'nvarchar', 'int', 'datetime2'.
 * @returns The converted JavaScript value (null for 'NULL').
 * @throws Error when a boolean is routed to a date type, and for tvp, udt,
 * or unrecognized type names.
 */
export const convertSqlValueToJsValue = (value: SqlValue, sqlType: string): unknown => {
  if (value === 'NULL') {
    return null;
  }
  const lowerCaseSqlType = sqlType.toLowerCase();
  if (
    isType(lowerCaseSqlType, [
      'varchar',
      'nvarchar',
      'char',
      'nchar',
      'text',
      'ntext',
      'xml',
      'uniqueidentifier',
    ])
  ) {
    return String(value);
  }
  if (
    isType(lowerCaseSqlType, [
      'int',
      'smallint',
      'tinyint',
      'bigint',
      'decimal',
      'numeric',
      'float',
      'real',
      'money',
      'smallmoney',
    ])
  ) {
    return Number(value);
  }
  if (isType(lowerCaseSqlType, ['bit'])) {
    return Boolean(value);
  }
  // BUGFIX: rowversion/timestamp must be tested BEFORE the date/time branch.
  // 'timestamp'.startsWith('time') is true, so the previous ordering routed
  // 8-byte rowversion buffers into the Date branch, which threw because a
  // Buffer is neither a string/number nor a Date.
  if (isType(lowerCaseSqlType, ['rowversion', 'timestamp'])) {
    return Buffer.from(value as Buffer);
  }
  if (isType(lowerCaseSqlType, ['date', 'datetime', 'datetime2', 'smalldatetime', 'time'])) {
    if (isStringOrNumber(value) || isDate(value)) {
      return new Date(value);
    }
    throw new Error('Cannot create a Date from a boolean value.');
  }
  if (isType(lowerCaseSqlType, ['binary', 'varbinary', 'image'])) {
    return Buffer.from(value as Buffer);
  }
  if (isType(lowerCaseSqlType, ['hierarchyid', 'geometry', 'geography'])) {
    return value;
  }
  if (isType(lowerCaseSqlType, ['tvp'])) {
    throw new Error('TVPs are not supported.');
  }
  if (isType(lowerCaseSqlType, ['udt'])) {
    throw new Error('UDTs are not supported.');
  }
  throw new Error(`Unsupported SQL type: ${sqlType}`);
};
| src/lib/utils/type-map.ts | Falven-mssql-data-source-bca6621 | [
{
"filename": "src/lib/stored-procedure/stored-procedure-manager.ts",
"retrieved_chunk": " for (const spParameter of storedProcedureParameters) {\n const { name, type, length, precision, scale, ...rest } = spParameter;\n const parameterName = name.slice(1);\n // Let's use the parameter name in lowercase as the lookup key.\n preparedParameters.set(parameterName.toLowerCase(), {\n name: parameterName,\n type: mapDbTypeToDriverType({\n type,\n length,\n precision,",
"score": 33.59022642602205
},
{
"filename": "src/lib/types/i-stored-procedure-parameter.ts",
"retrieved_chunk": " */\nexport interface StoredProcedureParameter {\n name: string;\n type: string;\n mode: ParameterMode;\n defaultValue?: unknown;\n length?: number;\n precision?: number;\n scale?: number;\n}",
"score": 30.991570086471903
},
{
"filename": "src/lib/types/i-stored-procedure-parameter.ts",
"retrieved_chunk": "import type { ParameterMode } from '.';\n/**\n * Represents a subset of used metadata for an MSSQL stored procedure parameter.\n * @property {string} name - The name of the parameter. Begins with @.\n * @property {string} type - The MSSQL data type of the parameter.\n * @property {ParameterMode} mode - The MSSQL mode of the parameter. Either 'IN', 'INOUT' or 'UNKNOWN'.\n * @property {unknown} defaultValue - The default value of the parameter, if any, or undefined.\n * @property {number} length - The length of character-based parameters, or undefined.\n * @property {number} precision - The precision of floating point parameters, or undefined.\n * @property {number} scale - The scale of floating point parameters, or undefined.",
"score": 25.375576846830338
},
{
"filename": "src/lib/stored-procedure/stored-procedure-metadata-manager.ts",
"retrieved_chunk": " 'NUMERIC_PRECISION as precision, ' +\n 'NUMERIC_SCALE as scale ' +\n 'FROM INFORMATION_SCHEMA.PARAMETERS ' +\n `WHERE SPECIFIC_SCHEMA = '${schemaAndName[0]}' AND SPECIFIC_NAME = '${schemaAndName[1]}';\n SELECT OBJECT_DEFINITION(OBJECT_ID('${storedProcedureName}')) AS storedProcedureDefinition;`,\n );\n const recordSetLength = result.recordsets.length as number;\n if (recordSetLength < 1 || recordSetLength > 2) {\n throw new Error(\n `Could not retrieve stored procedure parameter schema from Database for stored procedure ${storedProcedureName}.`,",
"score": 17.07377886696033
},
{
"filename": "src/lib/stored-procedure/stored-procedure-manager.ts",
"retrieved_chunk": " type InputParameters,\n} from '../types';\nimport { mapDbTypeToDriverType, replacer } from '../utils';\nimport { logExecutionBegin, logPerformance, logSafely } from '../logging';\nimport {\n type StoredProcedureCacheManager,\n type StoredProcedureMetadataManager,\n} from '../stored-procedure';\nimport { type IResolverProcedureResult } from '../types/i-resolver-procedure-result';\nimport { getNodeSelectionSetNames, getFieldNamesExcludingNode } from '../utils/graphql-helper';",
"score": 15.446148339068884
}
] | typescript | const property = findPropertyCaseInsensitive(types, type); |
import { describe, expect, it } from 'vitest';
import { TypeAnalyzer } from '.';
import { TYPE_KIND } from './constants';
describe('function', () => {
it('overloading', () => {
const analyzer = new TypeAnalyzer(`
const t = 1
function a<B extends 222>(): void;
function b<A>(o: A): string;
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 48 },
text: 'function a<B extends 222>(): void;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 49, end: 77 },
text: 'function b<A>(o: A): string;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
}
]);
});
it('function-generic-definition - a`<B extends ...>`()', () => {
const analyzer = new TypeAnalyzer(
`
function a<B extends 111, C extends 111>() {}
const b = <B extends 222, C extends 222>() => {};
const c = function<B extends 333, C extends 333>() {}
const d = {
a<B extends 444, C extends 444>() {}
}
`
);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 11, end: 41 },
text: '<B extends 111, C extends 111>',
kind: TYPE_KIND.FUNCTION_GENERIC_DEFINITION
},
{
range: { pos: 57, end: 87 },
text: '<B extends 222, C extends 222>',
kind: TYPE_KIND.FUNCTION_GENERIC_DEFINITION
},
{
range: { pos: 115, end: 145 },
text: '<B extends 333, C extends 333>',
kind: TYPE_KIND.FUNCTION_GENERIC_DEFINITION
},
{
range: { pos: 166, end: 196 },
text: '<B extends 444, C extends 444>',
kind: TYPE_KIND.FUNCTION_GENERIC_DEFINITION
}
]);
});
it('function-parameter - (`a: number, b: string, ...`)', () => {
const analyzer = new TypeAnalyzer(`
function a(a1: A111, a2?: A222) {}
const b = (b1: B111, b2?: B222) => {};
const c = function(c1: C111, c2?: C222) {}
const d = {
e(d1: E111, d2?: E222) {}
f: (f1: F111, f2?: F222) => {}
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 20 },
text: ': A111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 24, end: 31 },
text: '?: A222',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 49, end: 55 },
text: ': B111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 59, end: 66 },
text: '?: B222',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 96, end: 102 },
text: ': C111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 106, end: 113 },
text: '?: C222',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 136, end: 142 },
text: ': E111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 146, end: 153 },
text: '?: E222',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 166, end: 172 },
text: ': F111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 176, end: 183 },
text: '?: F222',
kind: TYPE_KIND.FUNCTION_PARAMETER
}
]);
});
it('function-return - ()`: number`', () => {
const analyzer = new TypeAnalyzer(`n
function a(): A111 {}
const b = (): B111 => {};
const c = function(): C111 {}
const d = {
d(): D111 {}
e: (): E111 => {}
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 20 },
text: ': A111',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 36, end: 42 },
text: ': B111',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 70, end: 76 },
text: ': C111',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 97, end: 103 },
text: ': D111',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 114, end: 120 },
text: ': E111',
kind: TYPE_KIND.FUNCTION_RETURN
}
]);
});
it('function-type-predicate - (a: any)`: asserts a is ...)`', () => {
const analyzer = new TypeAnalyzer(`
function a(value): asserts a is aaa {}
const b = (value): asserts b is bbb => {};
const c = function (value): asserts d is ddd {};
const d = {
e(value): asserts e is eee {},
f: (value): asserts f is fff => {}
};
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 18, end: 36 },
text: ': asserts a is aaa',
kind: | TYPE_KIND.FUNCTION_TYPE_PREDICATE
},
{ |
range: { pos: 58, end: 76 },
text: ': asserts b is bbb',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
},
{
range: { pos: 111, end: 129 },
text: ': asserts d is ddd',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
},
{
range: { pos: 157, end: 175 },
text: ': asserts e is eee',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
},
{
range: { pos: 192, end: 210 },
text: ': asserts f is fff',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
}
]);
});
});
it('interface', () => {
const analyzer = new TypeAnalyzer(`
interface t {};
interface A111 {
a: number;
b: string;
c: {
e: 1
}
}`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 1, end: 15 },
text: 'interface t {}',
kind: TYPE_KIND.INTERFACE
},
{
range: { pos: 17, end: 81 },
text: 'interface A111 {\n a: number;\n b: string;\n c: {\n e: 1\n }\n}',
kind: TYPE_KIND.INTERFACE
}
]);
});
it('type alias', () => {
const analyzer = new TypeAnalyzer(`
type t = number;
type A111 = {
a: number;
} | 123 & {}`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 1, end: 17 },
text: 'type t = number;',
kind: TYPE_KIND.TYPE_ALIAS
},
{
range: { pos: 18, end: 58 },
text: 'type A111 = {\n a: number;\n} | 123 & {}',
kind: TYPE_KIND.TYPE_ALIAS
}
]);
});
it('variable type definition', () => {
const analyzer = new TypeAnalyzer(`
const a = 1;
declare const b: number, c: string;
const d: number, e: string;
const eee: null | string = ''
let fff!: string = ''
using ggg: usingAny = fn();
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 49 },
text: 'declare const b: number, c: string;',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 57, end: 65 },
text: ': number',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
},
{
range: { pos: 68, end: 76 },
text: ': string',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
},
{
range: { pos: 87, end: 102 },
text: ': null | string',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
},
{
range: { pos: 115, end: 124 },
text: '!: string',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
},
{
range: { pos: 139, end: 149 },
text: ': usingAny',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
}
]);
});
it('declare statement', () => {
const analyzer = new TypeAnalyzer(`
declare const a: number;
declare function b(): number;
declare class c {}
declare module d {}
declare namespace e {}
declare enum f {}
declare global {}
declare module 'g' {}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 1, end: 25 },
text: 'declare const a: number;',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 26, end: 55 },
text: 'declare function b(): number;',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 56, end: 74 },
text: 'declare class c {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 75, end: 94 },
text: 'declare module d {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 95, end: 117 },
text: 'declare namespace e {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 118, end: 135 },
text: 'declare enum f {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 136, end: 153 },
text: 'declare global {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 154, end: 175 },
text: "declare module 'g' {}",
kind: TYPE_KIND.DECLARE_STATEMENT
}
]);
});
it('as expression', () => {
const analyzer = new TypeAnalyzer(`
const a = 1 as number;
const b = 1 as number | string;
const c = 1 as number | string | null as 111 as 3;
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 12, end: 22 },
text: ' as number',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 35, end: 54 },
text: ' as number | string',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 67, end: 93 },
text: ' as number | string | null',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 93, end: 100 },
text: ' as 111',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 100, end: 105 },
text: ' as 3',
kind: TYPE_KIND.AS_ASSERTION
}
]);
});
it('satisfies expression', () => {
const analyzer = new TypeAnalyzer(`
const a = 1 satisfies number;
const b = 1 satisfies number | string;
const c = 1 satisfies number | string | null;
const d = () => {
return 333 satisfies any
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 12, end: 29 },
text: ' satisfies number',
kind: TYPE_KIND.SATISFIES_OPERATOR
},
{
range: { pos: 42, end: 68 },
text: ' satisfies number | string',
kind: TYPE_KIND.SATISFIES_OPERATOR
},
{
range: { pos: 81, end: 114 },
text: ' satisfies number | string | null',
kind: TYPE_KIND.SATISFIES_OPERATOR
},
{
range: { pos: 147, end: 161 },
text: ' satisfies any',
kind: TYPE_KIND.SATISFIES_OPERATOR
}
]);
});
it('satisfies & as', () => {
const analyzer = new TypeAnalyzer(`
const a = {} satisfies {} as const;
const b = {} as const satisfies {};
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
kind: TYPE_KIND.SATISFIES_OPERATOR,
range: { pos: 13, end: 26 },
text: ' satisfies {}'
},
{
kind: TYPE_KIND.AS_ASSERTION,
range: { pos: 26, end: 35 },
text: ' as const'
},
{
kind: TYPE_KIND.AS_ASSERTION,
range: { pos: 49, end: 58 },
text: ' as const'
},
{
kind: TYPE_KIND.SATISFIES_OPERATOR,
range: { pos: 58, end: 71 },
text: ' satisfies {}'
}
]);
});
it('type assertion', () => {
const analyzer = new TypeAnalyzer(`
const a =<number>1;
const b = <number | string>1;
const c = <number | string | null>1;
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 10, end: 18 },
text: '<number>',
kind: TYPE_KIND.ANGLE_BRACKETS_ASSERTION
},
{
range: { pos: 31, end: 48 },
text: '<number | string>',
kind: TYPE_KIND.ANGLE_BRACKETS_ASSERTION
},
{
range: { pos: 61, end: 85 },
text: '<number | string | null>',
kind: TYPE_KIND.ANGLE_BRACKETS_ASSERTION
}
]);
});
it('call expression', () => {
const analyzer = new TypeAnalyzer(`
b<number>();
new d<number, string>();
f<number, string, null>();
new Set<PersistListener<S>>()
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 2, end: 10 },
text: '<number>',
kind: TYPE_KIND.FUNCTION_CALL_GENERIC
},
{
range: { pos: 19, end: 35 },
text: '<number, string>',
kind: TYPE_KIND.FUNCTION_CALL_GENERIC
},
{
range: { pos: 40, end: 62 },
text: '<number, string, null>',
kind: TYPE_KIND.FUNCTION_CALL_GENERIC
},
{
range: { end: 93, pos: 73 },
text: '<PersistListener<S>>',
kind: TYPE_KIND.FUNCTION_CALL_GENERIC
}
]);
});
describe('class', () => {
it('property type definition', () => {
const analyzer = new TypeAnalyzer(`
class A {
a: number;
public b: string;
protected c: {
e: 1
}
private d: () => void = () => {}
e!: boolean;
g?: string;
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 22 },
text: ': number',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { pos: 34, end: 42 },
text: ': string',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { pos: 57, end: 73 },
text: ': {\n e: 1\n }',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { pos: 85, end: 97 },
text: ': () => void',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { pos: 112, end: 122 },
text: '!: boolean',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { end: 136, pos: 127 },
text: '?: string',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
}
]);
});
it('method declaration', () => {
const analyzer = new TypeAnalyzer(`
class A {
public a(p: 1): boolean;
public a(p: 2): number;
public a(p: 1 | 2): boolean | number {
return '' as any;
}
public b(a: number): string;
protected c(b: number | 1): {
e: 1
}
protected get compileUtils(): any | 'compileUtils' {
const abc = {
getConfig: (): ReadonlyDeep<InnerCompilerConfig> => {
return getCurrentCompileConfig() as any as unknown;
},
b(): void {}
}
}
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 11, end: 37 },
text: ' public a(p: 1): boolean;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 38, end: 63 },
text: ' public a(p: 2): number;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 76, end: 83 },
text: ': 1 | 2',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 84, end: 102 },
text: ': boolean | number',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 118, end: 125 },
text: ' as any',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 131, end: 161 },
text: ' public b(a: number): string;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 162, end: 206 },
text: ' protected c(b: number | 1): {\n e: 1\n }',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 237, end: 259 },
text: ": any | 'compileUtils'",
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 299, end: 334 },
text: ': ReadonlyDeep<InnerCompilerConfig>',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 380, end: 387 },
text: ' as any',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 387, end: 398 },
text: ' as unknown',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 418, end: 424 },
text: ': void',
kind: TYPE_KIND.FUNCTION_RETURN
}
]);
});
it('constructor', () => {
const analyzer = new TypeAnalyzer(`
class A {
constructor(a: number) {}
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 26, end: 34 },
text: ': number',
kind: TYPE_KIND.FUNCTION_PARAMETER
}
]);
});
});
describe('tsx', () => {
it('generic arguments', () => {
const analyzer = new TypeAnalyzer(
`
const a = <Component<number> />
const b = <A<number, string> />
const c = <A<number, string, null> />
const d = <A
<number, string, null, 1, 2 | 3, [22]>
/>
`,
true
);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 23, end: 31 },
text: '<number>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
},
{
range: { pos: 49, end: 65 },
text: '<number, string>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
},
{
range: { pos: 83, end: 105 },
text: '<number, string, null>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
},
{
range: { pos: 128, end: 166 },
text: '<number, string, null, 1, 2 | 3, [22]>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
}
]);
});
it('integration', () => {
const analyzer = new TypeAnalyzer(
`
const a = <Component<number>
name
test={111 as any}
t2={\`...\${11 as string}\`}
{...test as object}
/>
`,
true
);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 23, end: 31 },
text: '<number>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
},
{
range: { pos: 58, end: 65 },
text: ' as any',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 85, end: 95 },
text: ' as string',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 113, end: 123 },
text: ' as object',
kind: TYPE_KIND.AS_ASSERTION
}
]);
});
});
| src/core/helpers/type-analyzer/index.test.ts | xlboy-ts-type-hidden-a749a29 | [
{
"filename": "src/core/helpers/type-analyzer/index.ts",
"retrieved_chunk": " this.analyzedTypes.push({ kind, range: { pos, end }, text });\n }\n}",
"score": 14.79500541829947
},
{
"filename": "src/core/helpers/type-analyzer/index.ts",
"retrieved_chunk": " ]);\n }\n }\n }\n private pushAnalyzedType(\n kind: AnalyzedType['kind'],\n range: [pos: number, end: number]\n ) {\n const [pos, end] = range;\n const text = this.sourceFile.text.slice(pos, end);",
"score": 12.955022317033112
},
{
"filename": "src/core/helpers/type-analyzer/index.ts",
"retrieved_chunk": " this.analyzedTypes.forEach(type => {\n const oldTextLength = type.text.length;\n type.text = type.text.replace(/^[\\r\\n]+/, '');\n const startLineBreakCount = oldTextLength - type.text.length;\n type.text = type.text.replace(/[\\r\\n]+$/, '');\n const endLineBreakCount = oldTextLength - startLineBreakCount - type.text.length;\n type.range.pos += startLineBreakCount;\n type.range.end -= endLineBreakCount;\n });\n }",
"score": 10.760336703066152
},
{
"filename": "src/core/helpers/type-analyzer/index.ts",
"retrieved_chunk": " prevNode.end - 1,\n nextNode.pos + 1\n ]);\n }\n if (ts.isTypePredicateNode(curChild)) {\n // children[index], node = x is any\n // :\n const prevNode = children[index - 1];\n return this.pushAnalyzedType(TYPE_KIND.FUNCTION_TYPE_PREDICATE, [\n prevNode.end - 1,",
"score": 9.707691821748913
},
{
"filename": "src/core/helpers/type-analyzer/index.ts",
"retrieved_chunk": "import { isEqual } from 'lodash-es';\nimport ts from 'typescript';\nimport { TYPE_KIND } from './constants';\nexport interface AnalyzedType {\n kind: TYPE_KIND;\n range: ts.TextRange;\n text: string;\n}\nexport class TypeAnalyzer {\n public sourceFile: ts.SourceFile;",
"score": 8.552806220641383
}
] | typescript | TYPE_KIND.FUNCTION_TYPE_PREDICATE
},
{ |
import { describe, expect, it } from 'vitest';
import { TypeAnalyzer } from '.';
import { TYPE_KIND } from './constants';
describe('function', () => {
it('overloading', () => {
const analyzer = new TypeAnalyzer(`
const t = 1
function a<B extends 222>(): void;
function b<A>(o: A): string;
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 48 },
text: 'function a<B extends 222>(): void;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 49, end: 77 },
text: 'function b<A>(o: A): string;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
}
]);
});
it('function-generic-definition - a`<B extends ...>`()', () => {
const analyzer = new TypeAnalyzer(
`
function a<B extends 111, C extends 111>() {}
const b = <B extends 222, C extends 222>() => {};
const c = function<B extends 333, C extends 333>() {}
const d = {
a<B extends 444, C extends 444>() {}
}
`
);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 11, end: 41 },
text: '<B extends 111, C extends 111>',
| kind: TYPE_KIND.FUNCTION_GENERIC_DEFINITION
},
{ |
range: { pos: 57, end: 87 },
text: '<B extends 222, C extends 222>',
kind: TYPE_KIND.FUNCTION_GENERIC_DEFINITION
},
{
range: { pos: 115, end: 145 },
text: '<B extends 333, C extends 333>',
kind: TYPE_KIND.FUNCTION_GENERIC_DEFINITION
},
{
range: { pos: 166, end: 196 },
text: '<B extends 444, C extends 444>',
kind: TYPE_KIND.FUNCTION_GENERIC_DEFINITION
}
]);
});
it('function-parameter - (`a: number, b: string, ...`)', () => {
const analyzer = new TypeAnalyzer(`
function a(a1: A111, a2?: A222) {}
const b = (b1: B111, b2?: B222) => {};
const c = function(c1: C111, c2?: C222) {}
const d = {
e(d1: E111, d2?: E222) {}
f: (f1: F111, f2?: F222) => {}
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 20 },
text: ': A111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 24, end: 31 },
text: '?: A222',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 49, end: 55 },
text: ': B111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 59, end: 66 },
text: '?: B222',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 96, end: 102 },
text: ': C111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 106, end: 113 },
text: '?: C222',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 136, end: 142 },
text: ': E111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 146, end: 153 },
text: '?: E222',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 166, end: 172 },
text: ': F111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 176, end: 183 },
text: '?: F222',
kind: TYPE_KIND.FUNCTION_PARAMETER
}
]);
});
it('function-return - ()`: number`', () => {
const analyzer = new TypeAnalyzer(`n
function a(): A111 {}
const b = (): B111 => {};
const c = function(): C111 {}
const d = {
d(): D111 {}
e: (): E111 => {}
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 20 },
text: ': A111',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 36, end: 42 },
text: ': B111',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 70, end: 76 },
text: ': C111',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 97, end: 103 },
text: ': D111',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 114, end: 120 },
text: ': E111',
kind: TYPE_KIND.FUNCTION_RETURN
}
]);
});
it('function-type-predicate - (a: any)`: asserts a is ...)`', () => {
const analyzer = new TypeAnalyzer(`
function a(value): asserts a is aaa {}
const b = (value): asserts b is bbb => {};
const c = function (value): asserts d is ddd {};
const d = {
e(value): asserts e is eee {},
f: (value): asserts f is fff => {}
};
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 18, end: 36 },
text: ': asserts a is aaa',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
},
{
range: { pos: 58, end: 76 },
text: ': asserts b is bbb',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
},
{
range: { pos: 111, end: 129 },
text: ': asserts d is ddd',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
},
{
range: { pos: 157, end: 175 },
text: ': asserts e is eee',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
},
{
range: { pos: 192, end: 210 },
text: ': asserts f is fff',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
}
]);
});
});
it('interface', () => {
const analyzer = new TypeAnalyzer(`
interface t {};
interface A111 {
a: number;
b: string;
c: {
e: 1
}
}`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 1, end: 15 },
text: 'interface t {}',
kind: TYPE_KIND.INTERFACE
},
{
range: { pos: 17, end: 81 },
text: 'interface A111 {\n a: number;\n b: string;\n c: {\n e: 1\n }\n}',
kind: TYPE_KIND.INTERFACE
}
]);
});
it('type alias', () => {
const analyzer = new TypeAnalyzer(`
type t = number;
type A111 = {
a: number;
} | 123 & {}`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 1, end: 17 },
text: 'type t = number;',
kind: TYPE_KIND.TYPE_ALIAS
},
{
range: { pos: 18, end: 58 },
text: 'type A111 = {\n a: number;\n} | 123 & {}',
kind: TYPE_KIND.TYPE_ALIAS
}
]);
});
it('variable type definition', () => {
const analyzer = new TypeAnalyzer(`
const a = 1;
declare const b: number, c: string;
const d: number, e: string;
const eee: null | string = ''
let fff!: string = ''
using ggg: usingAny = fn();
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 49 },
text: 'declare const b: number, c: string;',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 57, end: 65 },
text: ': number',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
},
{
range: { pos: 68, end: 76 },
text: ': string',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
},
{
range: { pos: 87, end: 102 },
text: ': null | string',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
},
{
range: { pos: 115, end: 124 },
text: '!: string',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
},
{
range: { pos: 139, end: 149 },
text: ': usingAny',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
}
]);
});
it('declare statement', () => {
const analyzer = new TypeAnalyzer(`
declare const a: number;
declare function b(): number;
declare class c {}
declare module d {}
declare namespace e {}
declare enum f {}
declare global {}
declare module 'g' {}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 1, end: 25 },
text: 'declare const a: number;',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 26, end: 55 },
text: 'declare function b(): number;',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 56, end: 74 },
text: 'declare class c {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 75, end: 94 },
text: 'declare module d {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 95, end: 117 },
text: 'declare namespace e {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 118, end: 135 },
text: 'declare enum f {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 136, end: 153 },
text: 'declare global {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 154, end: 175 },
text: "declare module 'g' {}",
kind: TYPE_KIND.DECLARE_STATEMENT
}
]);
});
it('as expression', () => {
const analyzer = new TypeAnalyzer(`
const a = 1 as number;
const b = 1 as number | string;
const c = 1 as number | string | null as 111 as 3;
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 12, end: 22 },
text: ' as number',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 35, end: 54 },
text: ' as number | string',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 67, end: 93 },
text: ' as number | string | null',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 93, end: 100 },
text: ' as 111',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 100, end: 105 },
text: ' as 3',
kind: TYPE_KIND.AS_ASSERTION
}
]);
});
it('satisfies expression', () => {
const analyzer = new TypeAnalyzer(`
const a = 1 satisfies number;
const b = 1 satisfies number | string;
const c = 1 satisfies number | string | null;
const d = () => {
return 333 satisfies any
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 12, end: 29 },
text: ' satisfies number',
kind: TYPE_KIND.SATISFIES_OPERATOR
},
{
range: { pos: 42, end: 68 },
text: ' satisfies number | string',
kind: TYPE_KIND.SATISFIES_OPERATOR
},
{
range: { pos: 81, end: 114 },
text: ' satisfies number | string | null',
kind: TYPE_KIND.SATISFIES_OPERATOR
},
{
range: { pos: 147, end: 161 },
text: ' satisfies any',
kind: TYPE_KIND.SATISFIES_OPERATOR
}
]);
});
it('satisfies & as', () => {
const analyzer = new TypeAnalyzer(`
const a = {} satisfies {} as const;
const b = {} as const satisfies {};
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
kind: TYPE_KIND.SATISFIES_OPERATOR,
range: { pos: 13, end: 26 },
text: ' satisfies {}'
},
{
kind: TYPE_KIND.AS_ASSERTION,
range: { pos: 26, end: 35 },
text: ' as const'
},
{
kind: TYPE_KIND.AS_ASSERTION,
range: { pos: 49, end: 58 },
text: ' as const'
},
{
kind: TYPE_KIND.SATISFIES_OPERATOR,
range: { pos: 58, end: 71 },
text: ' satisfies {}'
}
]);
});
it('type assertion', () => {
const analyzer = new TypeAnalyzer(`
const a =<number>1;
const b = <number | string>1;
const c = <number | string | null>1;
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 10, end: 18 },
text: '<number>',
kind: TYPE_KIND.ANGLE_BRACKETS_ASSERTION
},
{
range: { pos: 31, end: 48 },
text: '<number | string>',
kind: TYPE_KIND.ANGLE_BRACKETS_ASSERTION
},
{
range: { pos: 61, end: 85 },
text: '<number | string | null>',
kind: TYPE_KIND.ANGLE_BRACKETS_ASSERTION
}
]);
});
it('call expression', () => {
const analyzer = new TypeAnalyzer(`
b<number>();
new d<number, string>();
f<number, string, null>();
new Set<PersistListener<S>>()
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 2, end: 10 },
text: '<number>',
kind: TYPE_KIND.FUNCTION_CALL_GENERIC
},
{
range: { pos: 19, end: 35 },
text: '<number, string>',
kind: TYPE_KIND.FUNCTION_CALL_GENERIC
},
{
range: { pos: 40, end: 62 },
text: '<number, string, null>',
kind: TYPE_KIND.FUNCTION_CALL_GENERIC
},
{
range: { end: 93, pos: 73 },
text: '<PersistListener<S>>',
kind: TYPE_KIND.FUNCTION_CALL_GENERIC
}
]);
});
describe('class', () => {
it('property type definition', () => {
const analyzer = new TypeAnalyzer(`
class A {
a: number;
public b: string;
protected c: {
e: 1
}
private d: () => void = () => {}
e!: boolean;
g?: string;
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 22 },
text: ': number',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { pos: 34, end: 42 },
text: ': string',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { pos: 57, end: 73 },
text: ': {\n e: 1\n }',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { pos: 85, end: 97 },
text: ': () => void',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { pos: 112, end: 122 },
text: '!: boolean',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { end: 136, pos: 127 },
text: '?: string',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
}
]);
});
it('method declaration', () => {
const analyzer = new TypeAnalyzer(`
class A {
public a(p: 1): boolean;
public a(p: 2): number;
public a(p: 1 | 2): boolean | number {
return '' as any;
}
public b(a: number): string;
protected c(b: number | 1): {
e: 1
}
protected get compileUtils(): any | 'compileUtils' {
const abc = {
getConfig: (): ReadonlyDeep<InnerCompilerConfig> => {
return getCurrentCompileConfig() as any as unknown;
},
b(): void {}
}
}
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 11, end: 37 },
text: ' public a(p: 1): boolean;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 38, end: 63 },
text: ' public a(p: 2): number;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 76, end: 83 },
text: ': 1 | 2',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 84, end: 102 },
text: ': boolean | number',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 118, end: 125 },
text: ' as any',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 131, end: 161 },
text: ' public b(a: number): string;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 162, end: 206 },
text: ' protected c(b: number | 1): {\n e: 1\n }',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 237, end: 259 },
text: ": any | 'compileUtils'",
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 299, end: 334 },
text: ': ReadonlyDeep<InnerCompilerConfig>',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 380, end: 387 },
text: ' as any',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 387, end: 398 },
text: ' as unknown',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 418, end: 424 },
text: ': void',
kind: TYPE_KIND.FUNCTION_RETURN
}
]);
});
it('constructor', () => {
const analyzer = new TypeAnalyzer(`
class A {
constructor(a: number) {}
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 26, end: 34 },
text: ': number',
kind: TYPE_KIND.FUNCTION_PARAMETER
}
]);
});
});
describe('tsx', () => {
it('generic arguments', () => {
const analyzer = new TypeAnalyzer(
`
const a = <Component<number> />
const b = <A<number, string> />
const c = <A<number, string, null> />
const d = <A
<number, string, null, 1, 2 | 3, [22]>
/>
`,
true
);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 23, end: 31 },
text: '<number>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
},
{
range: { pos: 49, end: 65 },
text: '<number, string>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
},
{
range: { pos: 83, end: 105 },
text: '<number, string, null>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
},
{
range: { pos: 128, end: 166 },
text: '<number, string, null, 1, 2 | 3, [22]>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
}
]);
});
it('integration', () => {
const analyzer = new TypeAnalyzer(
`
const a = <Component<number>
name
test={111 as any}
t2={\`...\${11 as string}\`}
{...test as object}
/>
`,
true
);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 23, end: 31 },
text: '<number>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
},
{
range: { pos: 58, end: 65 },
text: ' as any',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 85, end: 95 },
text: ' as string',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 113, end: 123 },
text: ' as object',
kind: TYPE_KIND.AS_ASSERTION
}
]);
});
});
| src/core/helpers/type-analyzer/index.test.ts | xlboy-ts-type-hidden-a749a29 | [
{
"filename": "src/core/helpers/type-analyzer/index.ts",
"retrieved_chunk": " }\n }\n const children = parent.getChildren(this.sourceFile);\n const index = children.findIndex(\n child => child.pos === curChild.pos && child.end === curChild.end\n );\n // ↓↓ function a<B extends 222>(test: ...) { ... } ↓↓\n if (ts.isTypeParameterDeclaration(curChild) && parent.typeParameters) {\n // children.slice(startIndex, endIndex) = B extends 222, C extends ...\n const startIndex = children.findIndex(",
"score": 18.528105511000348
},
{
"filename": "src/core/helpers/type-analyzer/constants.ts",
"retrieved_chunk": " /**\n * ```ts\n * function fn<A extends string, B = [A, '']>() {}\n * ```\n * ⏭️ `<A extends string, B = [A, '']>`\n */\n FUNCTION_GENERIC_DEFINITION = 'function-generic-definition',\n /**\n * ```ts\n * const name = get<UserModule>(userModule, 'info.name');",
"score": 18.168871521435943
},
{
"filename": "src/core/helpers/type-analyzer/index.ts",
"retrieved_chunk": " this.analyzedTypes.push({ kind, range: { pos, end }, text });\n }\n}",
"score": 14.79500541829947
},
{
"filename": "src/core/helpers/type-analyzer/index.ts",
"retrieved_chunk": " ]);\n }\n // function a<B extends 222>(test: ...): void;\n const isOverload = parent.body === undefined;\n if (isOverload) {\n // public a<B extends 222>(test: ...): void;\n if (ts.isMethodDeclaration(parent)) {\n let startPos = parent.name.end;\n if (parent.modifiers && parent.modifiers.length > 0) {\n startPos = parent.modifiers[0].pos;",
"score": 13.180882664815904
},
{
"filename": "src/core/helpers/type-analyzer/index.ts",
"retrieved_chunk": " ]);\n }\n }\n }\n private pushAnalyzedType(\n kind: AnalyzedType['kind'],\n range: [pos: number, end: number]\n ) {\n const [pos, end] = range;\n const text = this.sourceFile.text.slice(pos, end);",
"score": 12.955022317033112
}
] | typescript | kind: TYPE_KIND.FUNCTION_GENERIC_DEFINITION
},
{ |
import { describe, expect, it } from 'vitest';
import { TypeAnalyzer } from '.';
import { TYPE_KIND } from './constants';
describe('function', () => {
it('overloading', () => {
const analyzer = new TypeAnalyzer(`
const t = 1
function a<B extends 222>(): void;
function b<A>(o: A): string;
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 48 },
text: 'function a<B extends 222>(): void;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 49, end: 77 },
text: 'function b<A>(o: A): string;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
}
]);
});
it('function-generic-definition - a`<B extends ...>`()', () => {
const analyzer = new TypeAnalyzer(
`
function a<B extends 111, C extends 111>() {}
const b = <B extends 222, C extends 222>() => {};
const c = function<B extends 333, C extends 333>() {}
const d = {
a<B extends 444, C extends 444>() {}
}
`
);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 11, end: 41 },
text: '<B extends 111, C extends 111>',
kind: TYPE_KIND.FUNCTION_GENERIC_DEFINITION
},
{
range: { pos: 57, end: 87 },
text: '<B extends 222, C extends 222>',
kind: TYPE_KIND.FUNCTION_GENERIC_DEFINITION
},
{
range: { pos: 115, end: 145 },
text: '<B extends 333, C extends 333>',
kind: TYPE_KIND.FUNCTION_GENERIC_DEFINITION
},
{
range: { pos: 166, end: 196 },
text: '<B extends 444, C extends 444>',
kind: TYPE_KIND.FUNCTION_GENERIC_DEFINITION
}
]);
});
it('function-parameter - (`a: number, b: string, ...`)', () => {
const analyzer = new TypeAnalyzer(`
function a(a1: A111, a2?: A222) {}
const b = (b1: B111, b2?: B222) => {};
const c = function(c1: C111, c2?: C222) {}
const d = {
e(d1: E111, d2?: E222) {}
f: (f1: F111, f2?: F222) => {}
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 20 },
text: ': A111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 24, end: 31 },
text: '?: A222',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 49, end: 55 },
text: ': B111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 59, end: 66 },
text: '?: B222',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 96, end: 102 },
text: ': C111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 106, end: 113 },
text: '?: C222',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 136, end: 142 },
text: ': E111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 146, end: 153 },
text: '?: E222',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 166, end: 172 },
text: ': F111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 176, end: 183 },
text: '?: F222',
kind: TYPE_KIND.FUNCTION_PARAMETER
}
]);
});
it('function-return - ()`: number`', () => {
const analyzer = new TypeAnalyzer(`n
function a(): A111 {}
const b = (): B111 => {};
const c = function(): C111 {}
const d = {
d(): D111 {}
e: (): E111 => {}
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 20 },
text: ': A111',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 36, end: 42 },
text: ': B111',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 70, end: 76 },
text: ': C111',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 97, end: 103 },
text: ': D111',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 114, end: 120 },
text: ': E111',
kind: TYPE_KIND.FUNCTION_RETURN
}
]);
});
it('function-type-predicate - (a: any)`: asserts a is ...)`', () => {
const analyzer = new TypeAnalyzer(`
function a(value): asserts a is aaa {}
const b = (value): asserts b is bbb => {};
const c = function (value): asserts d is ddd {};
const d = {
e(value): asserts e is eee {},
f: (value): asserts f is fff => {}
};
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 18, end: 36 },
text: ': asserts a is aaa',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
},
{
range: { pos: 58, end: 76 },
text: ': asserts b is bbb',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
},
{
range: { pos: 111, end: 129 },
text: ': asserts d is ddd',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
},
{
range: { pos: 157, end: 175 },
text: ': asserts e is eee',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
},
{
range: { pos: 192, end: 210 },
text: ': asserts f is fff',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
}
]);
});
});
it('interface', () => {
const analyzer = new TypeAnalyzer(`
interface t {};
interface A111 {
a: number;
b: string;
c: {
e: 1
}
}`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 1, end: 15 },
text: 'interface t {}',
| kind: TYPE_KIND.INTERFACE
},
{ |
range: { pos: 17, end: 81 },
text: 'interface A111 {\n a: number;\n b: string;\n c: {\n e: 1\n }\n}',
kind: TYPE_KIND.INTERFACE
}
]);
});
it('type alias', () => {
const analyzer = new TypeAnalyzer(`
type t = number;
type A111 = {
a: number;
} | 123 & {}`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 1, end: 17 },
text: 'type t = number;',
kind: TYPE_KIND.TYPE_ALIAS
},
{
range: { pos: 18, end: 58 },
text: 'type A111 = {\n a: number;\n} | 123 & {}',
kind: TYPE_KIND.TYPE_ALIAS
}
]);
});
it('variable type definition', () => {
const analyzer = new TypeAnalyzer(`
const a = 1;
declare const b: number, c: string;
const d: number, e: string;
const eee: null | string = ''
let fff!: string = ''
using ggg: usingAny = fn();
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 49 },
text: 'declare const b: number, c: string;',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 57, end: 65 },
text: ': number',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
},
{
range: { pos: 68, end: 76 },
text: ': string',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
},
{
range: { pos: 87, end: 102 },
text: ': null | string',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
},
{
range: { pos: 115, end: 124 },
text: '!: string',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
},
{
range: { pos: 139, end: 149 },
text: ': usingAny',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
}
]);
});
it('declare statement', () => {
const analyzer = new TypeAnalyzer(`
declare const a: number;
declare function b(): number;
declare class c {}
declare module d {}
declare namespace e {}
declare enum f {}
declare global {}
declare module 'g' {}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 1, end: 25 },
text: 'declare const a: number;',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 26, end: 55 },
text: 'declare function b(): number;',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 56, end: 74 },
text: 'declare class c {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 75, end: 94 },
text: 'declare module d {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 95, end: 117 },
text: 'declare namespace e {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 118, end: 135 },
text: 'declare enum f {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 136, end: 153 },
text: 'declare global {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 154, end: 175 },
text: "declare module 'g' {}",
kind: TYPE_KIND.DECLARE_STATEMENT
}
]);
});
it('as expression', () => {
const analyzer = new TypeAnalyzer(`
const a = 1 as number;
const b = 1 as number | string;
const c = 1 as number | string | null as 111 as 3;
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 12, end: 22 },
text: ' as number',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 35, end: 54 },
text: ' as number | string',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 67, end: 93 },
text: ' as number | string | null',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 93, end: 100 },
text: ' as 111',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 100, end: 105 },
text: ' as 3',
kind: TYPE_KIND.AS_ASSERTION
}
]);
});
it('satisfies expression', () => {
const analyzer = new TypeAnalyzer(`
const a = 1 satisfies number;
const b = 1 satisfies number | string;
const c = 1 satisfies number | string | null;
const d = () => {
return 333 satisfies any
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 12, end: 29 },
text: ' satisfies number',
kind: TYPE_KIND.SATISFIES_OPERATOR
},
{
range: { pos: 42, end: 68 },
text: ' satisfies number | string',
kind: TYPE_KIND.SATISFIES_OPERATOR
},
{
range: { pos: 81, end: 114 },
text: ' satisfies number | string | null',
kind: TYPE_KIND.SATISFIES_OPERATOR
},
{
range: { pos: 147, end: 161 },
text: ' satisfies any',
kind: TYPE_KIND.SATISFIES_OPERATOR
}
]);
});
it('satisfies & as', () => {
const analyzer = new TypeAnalyzer(`
const a = {} satisfies {} as const;
const b = {} as const satisfies {};
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
kind: TYPE_KIND.SATISFIES_OPERATOR,
range: { pos: 13, end: 26 },
text: ' satisfies {}'
},
{
kind: TYPE_KIND.AS_ASSERTION,
range: { pos: 26, end: 35 },
text: ' as const'
},
{
kind: TYPE_KIND.AS_ASSERTION,
range: { pos: 49, end: 58 },
text: ' as const'
},
{
kind: TYPE_KIND.SATISFIES_OPERATOR,
range: { pos: 58, end: 71 },
text: ' satisfies {}'
}
]);
});
it('type assertion', () => {
const analyzer = new TypeAnalyzer(`
const a =<number>1;
const b = <number | string>1;
const c = <number | string | null>1;
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 10, end: 18 },
text: '<number>',
kind: TYPE_KIND.ANGLE_BRACKETS_ASSERTION
},
{
range: { pos: 31, end: 48 },
text: '<number | string>',
kind: TYPE_KIND.ANGLE_BRACKETS_ASSERTION
},
{
range: { pos: 61, end: 85 },
text: '<number | string | null>',
kind: TYPE_KIND.ANGLE_BRACKETS_ASSERTION
}
]);
});
it('call expression', () => {
const analyzer = new TypeAnalyzer(`
b<number>();
new d<number, string>();
f<number, string, null>();
new Set<PersistListener<S>>()
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 2, end: 10 },
text: '<number>',
kind: TYPE_KIND.FUNCTION_CALL_GENERIC
},
{
range: { pos: 19, end: 35 },
text: '<number, string>',
kind: TYPE_KIND.FUNCTION_CALL_GENERIC
},
{
range: { pos: 40, end: 62 },
text: '<number, string, null>',
kind: TYPE_KIND.FUNCTION_CALL_GENERIC
},
{
range: { end: 93, pos: 73 },
text: '<PersistListener<S>>',
kind: TYPE_KIND.FUNCTION_CALL_GENERIC
}
]);
});
describe('class', () => {
it('property type definition', () => {
const analyzer = new TypeAnalyzer(`
class A {
a: number;
public b: string;
protected c: {
e: 1
}
private d: () => void = () => {}
e!: boolean;
g?: string;
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 22 },
text: ': number',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { pos: 34, end: 42 },
text: ': string',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { pos: 57, end: 73 },
text: ': {\n e: 1\n }',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { pos: 85, end: 97 },
text: ': () => void',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { pos: 112, end: 122 },
text: '!: boolean',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { end: 136, pos: 127 },
text: '?: string',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
}
]);
});
it('method declaration', () => {
const analyzer = new TypeAnalyzer(`
class A {
public a(p: 1): boolean;
public a(p: 2): number;
public a(p: 1 | 2): boolean | number {
return '' as any;
}
public b(a: number): string;
protected c(b: number | 1): {
e: 1
}
protected get compileUtils(): any | 'compileUtils' {
const abc = {
getConfig: (): ReadonlyDeep<InnerCompilerConfig> => {
return getCurrentCompileConfig() as any as unknown;
},
b(): void {}
}
}
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 11, end: 37 },
text: ' public a(p: 1): boolean;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 38, end: 63 },
text: ' public a(p: 2): number;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 76, end: 83 },
text: ': 1 | 2',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 84, end: 102 },
text: ': boolean | number',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 118, end: 125 },
text: ' as any',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 131, end: 161 },
text: ' public b(a: number): string;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 162, end: 206 },
text: ' protected c(b: number | 1): {\n e: 1\n }',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 237, end: 259 },
text: ": any | 'compileUtils'",
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 299, end: 334 },
text: ': ReadonlyDeep<InnerCompilerConfig>',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 380, end: 387 },
text: ' as any',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 387, end: 398 },
text: ' as unknown',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 418, end: 424 },
text: ': void',
kind: TYPE_KIND.FUNCTION_RETURN
}
]);
});
it('constructor', () => {
const analyzer = new TypeAnalyzer(`
class A {
constructor(a: number) {}
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 26, end: 34 },
text: ': number',
kind: TYPE_KIND.FUNCTION_PARAMETER
}
]);
});
});
describe('tsx', () => {
it('generic arguments', () => {
const analyzer = new TypeAnalyzer(
`
const a = <Component<number> />
const b = <A<number, string> />
const c = <A<number, string, null> />
const d = <A
<number, string, null, 1, 2 | 3, [22]>
/>
`,
true
);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 23, end: 31 },
text: '<number>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
},
{
range: { pos: 49, end: 65 },
text: '<number, string>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
},
{
range: { pos: 83, end: 105 },
text: '<number, string, null>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
},
{
range: { pos: 128, end: 166 },
text: '<number, string, null, 1, 2 | 3, [22]>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
}
]);
});
it('integration', () => {
const analyzer = new TypeAnalyzer(
`
const a = <Component<number>
name
test={111 as any}
t2={\`...\${11 as string}\`}
{...test as object}
/>
`,
true
);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 23, end: 31 },
text: '<number>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
},
{
range: { pos: 58, end: 65 },
text: ' as any',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 85, end: 95 },
text: ' as string',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 113, end: 123 },
text: ' as object',
kind: TYPE_KIND.AS_ASSERTION
}
]);
});
});
| src/core/helpers/type-analyzer/index.test.ts | xlboy-ts-type-hidden-a749a29 | [
{
"filename": "src/core/helpers/type-analyzer/index.ts",
"retrieved_chunk": " this.analyzedTypes.push({ kind, range: { pos, end }, text });\n }\n}",
"score": 14.79500541829947
},
{
"filename": "src/core/helpers/type-analyzer/index.ts",
"retrieved_chunk": " ]);\n }\n }\n }\n private pushAnalyzedType(\n kind: AnalyzedType['kind'],\n range: [pos: number, end: number]\n ) {\n const [pos, end] = range;\n const text = this.sourceFile.text.slice(pos, end);",
"score": 12.955022317033112
},
{
"filename": "src/core/helpers/type-analyzer/index.ts",
"retrieved_chunk": "import { isEqual } from 'lodash-es';\nimport ts from 'typescript';\nimport { TYPE_KIND } from './constants';\nexport interface AnalyzedType {\n kind: TYPE_KIND;\n range: ts.TextRange;\n text: string;\n}\nexport class TypeAnalyzer {\n public sourceFile: ts.SourceFile;",
"score": 11.610388848919964
},
{
"filename": "src/core/helpers/type-analyzer/constants.ts",
"retrieved_chunk": " * interface A {\n * ...\n * }\n * ```\n * ⏭️ `interface A { ... }`\n */\n INTERFACE = 'interface',\n /**\n * ```ts\n * function fn(a: number): number[];",
"score": 10.97234030675365
},
{
"filename": "src/core/editor-context.ts",
"retrieved_chunk": "import vscode from 'vscode';\nimport { type AnalyzedType, TypeAnalyzer } from './helpers/type-analyzer';\nimport { debounce, isEqual } from 'lodash-es';\nimport { GlobalState } from './global-state';\nimport { Config } from './config';\ntype FoldingRange = Record<'start' | 'end', /* lineNumber */ number>;\ninterface EditorInfo {\n code: string;\n analyzedTypes: AnalyzedType[];\n isTSX: boolean;",
"score": 10.879068001026175
}
] | typescript | kind: TYPE_KIND.INTERFACE
},
{ |
import { describe, expect, it } from 'vitest';
import { TypeAnalyzer } from '.';
import { TYPE_KIND } from './constants';
describe('function', () => {
it('overloading', () => {
const analyzer = new TypeAnalyzer(`
const t = 1
function a<B extends 222>(): void;
function b<A>(o: A): string;
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 48 },
text: 'function a<B extends 222>(): void;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 49, end: 77 },
text: 'function b<A>(o: A): string;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
}
]);
});
it('function-generic-definition - a`<B extends ...>`()', () => {
const analyzer = new TypeAnalyzer(
`
function a<B extends 111, C extends 111>() {}
const b = <B extends 222, C extends 222>() => {};
const c = function<B extends 333, C extends 333>() {}
const d = {
a<B extends 444, C extends 444>() {}
}
`
);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 11, end: 41 },
text: '<B extends 111, C extends 111>',
kind: TYPE_KIND.FUNCTION_GENERIC_DEFINITION
},
{
range: { pos: 57, end: 87 },
text: '<B extends 222, C extends 222>',
kind: TYPE_KIND.FUNCTION_GENERIC_DEFINITION
},
{
range: { pos: 115, end: 145 },
text: '<B extends 333, C extends 333>',
kind: TYPE_KIND.FUNCTION_GENERIC_DEFINITION
},
{
range: { pos: 166, end: 196 },
text: '<B extends 444, C extends 444>',
kind: TYPE_KIND.FUNCTION_GENERIC_DEFINITION
}
]);
});
it('function-parameter - (`a: number, b: string, ...`)', () => {
const analyzer = new TypeAnalyzer(`
function a(a1: A111, a2?: A222) {}
const b = (b1: B111, b2?: B222) => {};
const c = function(c1: C111, c2?: C222) {}
const d = {
e(d1: E111, d2?: E222) {}
f: (f1: F111, f2?: F222) => {}
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 20 },
text: ': A111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 24, end: 31 },
text: '?: A222',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 49, end: 55 },
text: ': B111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 59, end: 66 },
text: '?: B222',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 96, end: 102 },
text: ': C111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 106, end: 113 },
text: '?: C222',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 136, end: 142 },
text: ': E111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 146, end: 153 },
text: '?: E222',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 166, end: 172 },
text: ': F111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 176, end: 183 },
text: '?: F222',
kind: TYPE_KIND.FUNCTION_PARAMETER
}
]);
});
it('function-return - ()`: number`', () => {
const analyzer = new TypeAnalyzer(`n
function a(): A111 {}
const b = (): B111 => {};
const c = function(): C111 {}
const d = {
d(): D111 {}
e: (): E111 => {}
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 20 },
text: ': A111',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 36, end: 42 },
text: ': B111',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 70, end: 76 },
text: ': C111',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 97, end: 103 },
text: ': D111',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 114, end: 120 },
text: ': E111',
kind: TYPE_KIND.FUNCTION_RETURN
}
]);
});
it('function-type-predicate - (a: any)`: asserts a is ...)`', () => {
const analyzer = new TypeAnalyzer(`
function a(value): asserts a is aaa {}
const b = (value): asserts b is bbb => {};
const c = function (value): asserts d is ddd {};
const d = {
e(value): asserts e is eee {},
f: (value): asserts f is fff => {}
};
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 18, end: 36 },
text: ': asserts a is aaa',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
},
{
range: { pos: 58, end: 76 },
text: ': asserts b is bbb',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
},
{
range: { pos: 111, end: 129 },
text: ': asserts d is ddd',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
},
{
range: { pos: 157, end: 175 },
text: ': asserts e is eee',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
},
{
range: { pos: 192, end: 210 },
text: ': asserts f is fff',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
}
]);
});
});
it('interface', () => {
const analyzer = new TypeAnalyzer(`
interface t {};
interface A111 {
a: number;
b: string;
c: {
e: 1
}
}`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 1, end: 15 },
text: 'interface t {}',
kind: TYPE_KIND.INTERFACE
},
{
range: { pos: 17, end: 81 },
text: 'interface A111 {\n a: number;\n b: string;\n c: {\n e: 1\n }\n}',
kind: TYPE_KIND.INTERFACE
}
]);
});
it('type alias', () => {
const analyzer = new TypeAnalyzer(`
type t = number;
type A111 = {
a: number;
} | 123 & {}`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 1, end: 17 },
text: 'type t = number;',
kind: TYPE_KIND.TYPE_ALIAS
},
{
range: { pos: 18, end: 58 },
text: 'type A111 = {\n a: number;\n} | 123 & {}',
kind: TYPE_KIND.TYPE_ALIAS
}
]);
});
it('variable type definition', () => {
const analyzer = new TypeAnalyzer(`
const a = 1;
declare const b: number, c: string;
const d: number, e: string;
const eee: null | string = ''
let fff!: string = ''
using ggg: usingAny = fn();
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 49 },
text: 'declare const b: number, c: string;',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 57, end: 65 },
text: ': number',
kind: | TYPE_KIND.VARIABLE_TYPE_DEFINITION
},
{ |
range: { pos: 68, end: 76 },
text: ': string',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
},
{
range: { pos: 87, end: 102 },
text: ': null | string',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
},
{
range: { pos: 115, end: 124 },
text: '!: string',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
},
{
range: { pos: 139, end: 149 },
text: ': usingAny',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
}
]);
});
it('declare statement', () => {
const analyzer = new TypeAnalyzer(`
declare const a: number;
declare function b(): number;
declare class c {}
declare module d {}
declare namespace e {}
declare enum f {}
declare global {}
declare module 'g' {}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 1, end: 25 },
text: 'declare const a: number;',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 26, end: 55 },
text: 'declare function b(): number;',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 56, end: 74 },
text: 'declare class c {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 75, end: 94 },
text: 'declare module d {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 95, end: 117 },
text: 'declare namespace e {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 118, end: 135 },
text: 'declare enum f {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 136, end: 153 },
text: 'declare global {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 154, end: 175 },
text: "declare module 'g' {}",
kind: TYPE_KIND.DECLARE_STATEMENT
}
]);
});
it('as expression', () => {
const analyzer = new TypeAnalyzer(`
const a = 1 as number;
const b = 1 as number | string;
const c = 1 as number | string | null as 111 as 3;
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 12, end: 22 },
text: ' as number',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 35, end: 54 },
text: ' as number | string',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 67, end: 93 },
text: ' as number | string | null',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 93, end: 100 },
text: ' as 111',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 100, end: 105 },
text: ' as 3',
kind: TYPE_KIND.AS_ASSERTION
}
]);
});
it('satisfies expression', () => {
const analyzer = new TypeAnalyzer(`
const a = 1 satisfies number;
const b = 1 satisfies number | string;
const c = 1 satisfies number | string | null;
const d = () => {
return 333 satisfies any
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 12, end: 29 },
text: ' satisfies number',
kind: TYPE_KIND.SATISFIES_OPERATOR
},
{
range: { pos: 42, end: 68 },
text: ' satisfies number | string',
kind: TYPE_KIND.SATISFIES_OPERATOR
},
{
range: { pos: 81, end: 114 },
text: ' satisfies number | string | null',
kind: TYPE_KIND.SATISFIES_OPERATOR
},
{
range: { pos: 147, end: 161 },
text: ' satisfies any',
kind: TYPE_KIND.SATISFIES_OPERATOR
}
]);
});
it('satisfies & as', () => {
const analyzer = new TypeAnalyzer(`
const a = {} satisfies {} as const;
const b = {} as const satisfies {};
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
kind: TYPE_KIND.SATISFIES_OPERATOR,
range: { pos: 13, end: 26 },
text: ' satisfies {}'
},
{
kind: TYPE_KIND.AS_ASSERTION,
range: { pos: 26, end: 35 },
text: ' as const'
},
{
kind: TYPE_KIND.AS_ASSERTION,
range: { pos: 49, end: 58 },
text: ' as const'
},
{
kind: TYPE_KIND.SATISFIES_OPERATOR,
range: { pos: 58, end: 71 },
text: ' satisfies {}'
}
]);
});
it('type assertion', () => {
const analyzer = new TypeAnalyzer(`
const a =<number>1;
const b = <number | string>1;
const c = <number | string | null>1;
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 10, end: 18 },
text: '<number>',
kind: TYPE_KIND.ANGLE_BRACKETS_ASSERTION
},
{
range: { pos: 31, end: 48 },
text: '<number | string>',
kind: TYPE_KIND.ANGLE_BRACKETS_ASSERTION
},
{
range: { pos: 61, end: 85 },
text: '<number | string | null>',
kind: TYPE_KIND.ANGLE_BRACKETS_ASSERTION
}
]);
});
it('call expression', () => {
const analyzer = new TypeAnalyzer(`
b<number>();
new d<number, string>();
f<number, string, null>();
new Set<PersistListener<S>>()
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 2, end: 10 },
text: '<number>',
kind: TYPE_KIND.FUNCTION_CALL_GENERIC
},
{
range: { pos: 19, end: 35 },
text: '<number, string>',
kind: TYPE_KIND.FUNCTION_CALL_GENERIC
},
{
range: { pos: 40, end: 62 },
text: '<number, string, null>',
kind: TYPE_KIND.FUNCTION_CALL_GENERIC
},
{
range: { end: 93, pos: 73 },
text: '<PersistListener<S>>',
kind: TYPE_KIND.FUNCTION_CALL_GENERIC
}
]);
});
describe('class', () => {
it('property type definition', () => {
const analyzer = new TypeAnalyzer(`
class A {
a: number;
public b: string;
protected c: {
e: 1
}
private d: () => void = () => {}
e!: boolean;
g?: string;
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 22 },
text: ': number',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { pos: 34, end: 42 },
text: ': string',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { pos: 57, end: 73 },
text: ': {\n e: 1\n }',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { pos: 85, end: 97 },
text: ': () => void',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { pos: 112, end: 122 },
text: '!: boolean',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { end: 136, pos: 127 },
text: '?: string',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
}
]);
});
it('method declaration', () => {
const analyzer = new TypeAnalyzer(`
class A {
public a(p: 1): boolean;
public a(p: 2): number;
public a(p: 1 | 2): boolean | number {
return '' as any;
}
public b(a: number): string;
protected c(b: number | 1): {
e: 1
}
protected get compileUtils(): any | 'compileUtils' {
const abc = {
getConfig: (): ReadonlyDeep<InnerCompilerConfig> => {
return getCurrentCompileConfig() as any as unknown;
},
b(): void {}
}
}
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 11, end: 37 },
text: ' public a(p: 1): boolean;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 38, end: 63 },
text: ' public a(p: 2): number;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 76, end: 83 },
text: ': 1 | 2',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 84, end: 102 },
text: ': boolean | number',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 118, end: 125 },
text: ' as any',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 131, end: 161 },
text: ' public b(a: number): string;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 162, end: 206 },
text: ' protected c(b: number | 1): {\n e: 1\n }',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 237, end: 259 },
text: ": any | 'compileUtils'",
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 299, end: 334 },
text: ': ReadonlyDeep<InnerCompilerConfig>',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 380, end: 387 },
text: ' as any',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 387, end: 398 },
text: ' as unknown',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 418, end: 424 },
text: ': void',
kind: TYPE_KIND.FUNCTION_RETURN
}
]);
});
it('constructor', () => {
const analyzer = new TypeAnalyzer(`
class A {
constructor(a: number) {}
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 26, end: 34 },
text: ': number',
kind: TYPE_KIND.FUNCTION_PARAMETER
}
]);
});
});
describe('tsx', () => {
it('generic arguments', () => {
const analyzer = new TypeAnalyzer(
`
const a = <Component<number> />
const b = <A<number, string> />
const c = <A<number, string, null> />
const d = <A
<number, string, null, 1, 2 | 3, [22]>
/>
`,
true
);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 23, end: 31 },
text: '<number>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
},
{
range: { pos: 49, end: 65 },
text: '<number, string>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
},
{
range: { pos: 83, end: 105 },
text: '<number, string, null>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
},
{
range: { pos: 128, end: 166 },
text: '<number, string, null, 1, 2 | 3, [22]>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
}
]);
});
it('integration', () => {
const analyzer = new TypeAnalyzer(
`
const a = <Component<number>
name
test={111 as any}
t2={\`...\${11 as string}\`}
{...test as object}
/>
`,
true
);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 23, end: 31 },
text: '<number>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
},
{
range: { pos: 58, end: 65 },
text: ' as any',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 85, end: 95 },
text: ' as string',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 113, end: 123 },
text: ' as object',
kind: TYPE_KIND.AS_ASSERTION
}
]);
});
});
| src/core/helpers/type-analyzer/index.test.ts | xlboy-ts-type-hidden-a749a29 | [
{
"filename": "src/core/helpers/type-analyzer/index.ts",
"retrieved_chunk": " ]);\n }\n }\n }\n private pushAnalyzedType(\n kind: AnalyzedType['kind'],\n range: [pos: number, end: number]\n ) {\n const [pos, end] = range;\n const text = this.sourceFile.text.slice(pos, end);",
"score": 26.069617262537797
},
{
"filename": "src/core/helpers/type-analyzer/constants.ts",
"retrieved_chunk": " * const user = { ... } satisfies UserModel;\n * ```\n * ⏭️ ` satisfies UserModel`\n */\n SATISFIES_OPERATOR = 'satisfies-operator',\n /**\n * ```ts\n * declare const a: number;\n * declare function b(): number;\n * declare class c {}",
"score": 20.643122313535798
},
{
"filename": "src/core/helpers/type-analyzer/index.ts",
"retrieved_chunk": " this.analyzedTypes.push({ kind, range: { pos, end }, text });\n }\n}",
"score": 18.895860069609597
},
{
"filename": "src/core/helpers/type-analyzer/index.ts",
"retrieved_chunk": "import { isEqual } from 'lodash-es';\nimport ts from 'typescript';\nimport { TYPE_KIND } from './constants';\nexport interface AnalyzedType {\n kind: TYPE_KIND;\n range: ts.TextRange;\n text: string;\n}\nexport class TypeAnalyzer {\n public sourceFile: ts.SourceFile;",
"score": 17.32119675839703
},
{
"filename": "src/core/helpers/type-analyzer/index.ts",
"retrieved_chunk": " ) {\n const kind =\n child.kind === ts.SyntaxKind.InterfaceDeclaration\n ? TYPE_KIND.INTERFACE\n : TYPE_KIND.TYPE_ALIAS;\n this.pushAnalyzedType(kind, [child.pos, child.end]);\n }\n // context = `a: number`, curChild = `number`\n function handleParentParameter(\n this: TypeAnalyzer,",
"score": 16.73295715824397
}
] | typescript | TYPE_KIND.VARIABLE_TYPE_DEFINITION
},
{ |
import { describe, expect, it } from 'vitest';
import { TypeAnalyzer } from '.';
import { TYPE_KIND } from './constants';
describe('function', () => {
it('overloading', () => {
const analyzer = new TypeAnalyzer(`
const t = 1
function a<B extends 222>(): void;
function b<A>(o: A): string;
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 48 },
text: 'function a<B extends 222>(): void;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 49, end: 77 },
text: 'function b<A>(o: A): string;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
}
]);
});
it('function-generic-definition - a`<B extends ...>`()', () => {
const analyzer = new TypeAnalyzer(
`
function a<B extends 111, C extends 111>() {}
const b = <B extends 222, C extends 222>() => {};
const c = function<B extends 333, C extends 333>() {}
const d = {
a<B extends 444, C extends 444>() {}
}
`
);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 11, end: 41 },
text: '<B extends 111, C extends 111>',
kind: TYPE_KIND.FUNCTION_GENERIC_DEFINITION
},
{
range: { pos: 57, end: 87 },
text: '<B extends 222, C extends 222>',
kind: TYPE_KIND.FUNCTION_GENERIC_DEFINITION
},
{
range: { pos: 115, end: 145 },
text: '<B extends 333, C extends 333>',
kind: TYPE_KIND.FUNCTION_GENERIC_DEFINITION
},
{
range: { pos: 166, end: 196 },
text: '<B extends 444, C extends 444>',
kind: TYPE_KIND.FUNCTION_GENERIC_DEFINITION
}
]);
});
it('function-parameter - (`a: number, b: string, ...`)', () => {
const analyzer = new TypeAnalyzer(`
function a(a1: A111, a2?: A222) {}
const b = (b1: B111, b2?: B222) => {};
const c = function(c1: C111, c2?: C222) {}
const d = {
e(d1: E111, d2?: E222) {}
f: (f1: F111, f2?: F222) => {}
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 20 },
text: ': A111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 24, end: 31 },
text: '?: A222',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 49, end: 55 },
text: ': B111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 59, end: 66 },
text: '?: B222',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 96, end: 102 },
text: ': C111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 106, end: 113 },
text: '?: C222',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 136, end: 142 },
text: ': E111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 146, end: 153 },
text: '?: E222',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 166, end: 172 },
text: ': F111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 176, end: 183 },
text: '?: F222',
kind: TYPE_KIND.FUNCTION_PARAMETER
}
]);
});
it('function-return - ()`: number`', () => {
const analyzer = new TypeAnalyzer(`n
function a(): A111 {}
const b = (): B111 => {};
const c = function(): C111 {}
const d = {
d(): D111 {}
e: (): E111 => {}
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 20 },
text: ': A111',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 36, end: 42 },
text: ': B111',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 70, end: 76 },
text: ': C111',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 97, end: 103 },
text: ': D111',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 114, end: 120 },
text: ': E111',
kind: TYPE_KIND.FUNCTION_RETURN
}
]);
});
it('function-type-predicate - (a: any)`: asserts a is ...)`', () => {
const analyzer = new TypeAnalyzer(`
function a(value): asserts a is aaa {}
const b = (value): asserts b is bbb => {};
const c = function (value): asserts d is ddd {};
const d = {
e(value): asserts e is eee {},
f: (value): asserts f is fff => {}
};
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 18, end: 36 },
text: ': asserts a is aaa',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
},
{
range: { pos: 58, end: 76 },
text: ': asserts b is bbb',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
},
{
range: { pos: 111, end: 129 },
text: ': asserts d is ddd',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
},
{
range: { pos: 157, end: 175 },
text: ': asserts e is eee',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
},
{
range: { pos: 192, end: 210 },
text: ': asserts f is fff',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
}
]);
});
});
it('interface', () => {
const analyzer = new TypeAnalyzer(`
interface t {};
interface A111 {
a: number;
b: string;
c: {
e: 1
}
}`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 1, end: 15 },
text: 'interface t {}',
kind: TYPE_KIND.INTERFACE
},
{
range: { pos: 17, end: 81 },
text: 'interface A111 {\n a: number;\n b: string;\n c: {\n e: 1\n }\n}',
kind: TYPE_KIND.INTERFACE
}
]);
});
it('type alias', () => {
const analyzer = new TypeAnalyzer(`
type t = number;
type A111 = {
a: number;
} | 123 & {}`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 1, end: 17 },
text: 'type t = number;',
kind: TYPE_KIND.TYPE_ALIAS
},
{
range: { pos: 18, end: 58 },
text: 'type A111 = {\n a: number;\n} | 123 & {}',
kind: TYPE_KIND.TYPE_ALIAS
}
]);
});
it('variable type definition', () => {
const analyzer = new TypeAnalyzer(`
const a = 1;
declare const b: number, c: string;
const d: number, e: string;
const eee: null | string = ''
let fff!: string = ''
using ggg: usingAny = fn();
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 49 },
text: 'declare const b: number, c: string;',
kind: | TYPE_KIND.DECLARE_STATEMENT
},
{ |
range: { pos: 57, end: 65 },
text: ': number',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
},
{
range: { pos: 68, end: 76 },
text: ': string',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
},
{
range: { pos: 87, end: 102 },
text: ': null | string',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
},
{
range: { pos: 115, end: 124 },
text: '!: string',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
},
{
range: { pos: 139, end: 149 },
text: ': usingAny',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
}
]);
});
it('declare statement', () => {
const analyzer = new TypeAnalyzer(`
declare const a: number;
declare function b(): number;
declare class c {}
declare module d {}
declare namespace e {}
declare enum f {}
declare global {}
declare module 'g' {}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 1, end: 25 },
text: 'declare const a: number;',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 26, end: 55 },
text: 'declare function b(): number;',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 56, end: 74 },
text: 'declare class c {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 75, end: 94 },
text: 'declare module d {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 95, end: 117 },
text: 'declare namespace e {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 118, end: 135 },
text: 'declare enum f {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 136, end: 153 },
text: 'declare global {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 154, end: 175 },
text: "declare module 'g' {}",
kind: TYPE_KIND.DECLARE_STATEMENT
}
]);
});
it('as expression', () => {
const analyzer = new TypeAnalyzer(`
const a = 1 as number;
const b = 1 as number | string;
const c = 1 as number | string | null as 111 as 3;
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 12, end: 22 },
text: ' as number',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 35, end: 54 },
text: ' as number | string',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 67, end: 93 },
text: ' as number | string | null',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 93, end: 100 },
text: ' as 111',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 100, end: 105 },
text: ' as 3',
kind: TYPE_KIND.AS_ASSERTION
}
]);
});
it('satisfies expression', () => {
const analyzer = new TypeAnalyzer(`
const a = 1 satisfies number;
const b = 1 satisfies number | string;
const c = 1 satisfies number | string | null;
const d = () => {
return 333 satisfies any
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 12, end: 29 },
text: ' satisfies number',
kind: TYPE_KIND.SATISFIES_OPERATOR
},
{
range: { pos: 42, end: 68 },
text: ' satisfies number | string',
kind: TYPE_KIND.SATISFIES_OPERATOR
},
{
range: { pos: 81, end: 114 },
text: ' satisfies number | string | null',
kind: TYPE_KIND.SATISFIES_OPERATOR
},
{
range: { pos: 147, end: 161 },
text: ' satisfies any',
kind: TYPE_KIND.SATISFIES_OPERATOR
}
]);
});
it('satisfies & as', () => {
const analyzer = new TypeAnalyzer(`
const a = {} satisfies {} as const;
const b = {} as const satisfies {};
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
kind: TYPE_KIND.SATISFIES_OPERATOR,
range: { pos: 13, end: 26 },
text: ' satisfies {}'
},
{
kind: TYPE_KIND.AS_ASSERTION,
range: { pos: 26, end: 35 },
text: ' as const'
},
{
kind: TYPE_KIND.AS_ASSERTION,
range: { pos: 49, end: 58 },
text: ' as const'
},
{
kind: TYPE_KIND.SATISFIES_OPERATOR,
range: { pos: 58, end: 71 },
text: ' satisfies {}'
}
]);
});
it('type assertion', () => {
const analyzer = new TypeAnalyzer(`
const a =<number>1;
const b = <number | string>1;
const c = <number | string | null>1;
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 10, end: 18 },
text: '<number>',
kind: TYPE_KIND.ANGLE_BRACKETS_ASSERTION
},
{
range: { pos: 31, end: 48 },
text: '<number | string>',
kind: TYPE_KIND.ANGLE_BRACKETS_ASSERTION
},
{
range: { pos: 61, end: 85 },
text: '<number | string | null>',
kind: TYPE_KIND.ANGLE_BRACKETS_ASSERTION
}
]);
});
it('call expression', () => {
const analyzer = new TypeAnalyzer(`
b<number>();
new d<number, string>();
f<number, string, null>();
new Set<PersistListener<S>>()
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 2, end: 10 },
text: '<number>',
kind: TYPE_KIND.FUNCTION_CALL_GENERIC
},
{
range: { pos: 19, end: 35 },
text: '<number, string>',
kind: TYPE_KIND.FUNCTION_CALL_GENERIC
},
{
range: { pos: 40, end: 62 },
text: '<number, string, null>',
kind: TYPE_KIND.FUNCTION_CALL_GENERIC
},
{
range: { end: 93, pos: 73 },
text: '<PersistListener<S>>',
kind: TYPE_KIND.FUNCTION_CALL_GENERIC
}
]);
});
describe('class', () => {
it('property type definition', () => {
const analyzer = new TypeAnalyzer(`
class A {
a: number;
public b: string;
protected c: {
e: 1
}
private d: () => void = () => {}
e!: boolean;
g?: string;
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 22 },
text: ': number',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { pos: 34, end: 42 },
text: ': string',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { pos: 57, end: 73 },
text: ': {\n e: 1\n }',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { pos: 85, end: 97 },
text: ': () => void',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { pos: 112, end: 122 },
text: '!: boolean',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { end: 136, pos: 127 },
text: '?: string',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
}
]);
});
it('method declaration', () => {
const analyzer = new TypeAnalyzer(`
class A {
public a(p: 1): boolean;
public a(p: 2): number;
public a(p: 1 | 2): boolean | number {
return '' as any;
}
public b(a: number): string;
protected c(b: number | 1): {
e: 1
}
protected get compileUtils(): any | 'compileUtils' {
const abc = {
getConfig: (): ReadonlyDeep<InnerCompilerConfig> => {
return getCurrentCompileConfig() as any as unknown;
},
b(): void {}
}
}
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 11, end: 37 },
text: ' public a(p: 1): boolean;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 38, end: 63 },
text: ' public a(p: 2): number;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 76, end: 83 },
text: ': 1 | 2',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 84, end: 102 },
text: ': boolean | number',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 118, end: 125 },
text: ' as any',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 131, end: 161 },
text: ' public b(a: number): string;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 162, end: 206 },
text: ' protected c(b: number | 1): {\n e: 1\n }',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 237, end: 259 },
text: ": any | 'compileUtils'",
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 299, end: 334 },
text: ': ReadonlyDeep<InnerCompilerConfig>',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 380, end: 387 },
text: ' as any',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 387, end: 398 },
text: ' as unknown',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 418, end: 424 },
text: ': void',
kind: TYPE_KIND.FUNCTION_RETURN
}
]);
});
it('constructor', () => {
const analyzer = new TypeAnalyzer(`
class A {
constructor(a: number) {}
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 26, end: 34 },
text: ': number',
kind: TYPE_KIND.FUNCTION_PARAMETER
}
]);
});
});
describe('tsx', () => {
it('generic arguments', () => {
const analyzer = new TypeAnalyzer(
`
const a = <Component<number> />
const b = <A<number, string> />
const c = <A<number, string, null> />
const d = <A
<number, string, null, 1, 2 | 3, [22]>
/>
`,
true
);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 23, end: 31 },
text: '<number>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
},
{
range: { pos: 49, end: 65 },
text: '<number, string>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
},
{
range: { pos: 83, end: 105 },
text: '<number, string, null>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
},
{
range: { pos: 128, end: 166 },
text: '<number, string, null, 1, 2 | 3, [22]>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
}
]);
});
it('integration', () => {
const analyzer = new TypeAnalyzer(
`
const a = <Component<number>
name
test={111 as any}
t2={\`...\${11 as string}\`}
{...test as object}
/>
`,
true
);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 23, end: 31 },
text: '<number>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
},
{
range: { pos: 58, end: 65 },
text: ' as any',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 85, end: 95 },
text: ' as string',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 113, end: 123 },
text: ' as object',
kind: TYPE_KIND.AS_ASSERTION
}
]);
});
});
| src/core/helpers/type-analyzer/index.test.ts | xlboy-ts-type-hidden-a749a29 | [
{
"filename": "src/core/helpers/type-analyzer/constants.ts",
"retrieved_chunk": " * const user = { ... } satisfies UserModel;\n * ```\n * ⏭️ ` satisfies UserModel`\n */\n SATISFIES_OPERATOR = 'satisfies-operator',\n /**\n * ```ts\n * declare const a: number;\n * declare function b(): number;\n * declare class c {}",
"score": 18.328076515898218
},
{
"filename": "src/core/helpers/type-analyzer/index.ts",
"retrieved_chunk": " ]);\n }\n }\n }\n private pushAnalyzedType(\n kind: AnalyzedType['kind'],\n range: [pos: number, end: number]\n ) {\n const [pos, end] = range;\n const text = this.sourceFile.text.slice(pos, end);",
"score": 16.603920674868508
},
{
"filename": "src/core/helpers/type-analyzer/index.ts",
"retrieved_chunk": " this.analyzedTypes.push({ kind, range: { pos, end }, text });\n }\n}",
"score": 14.79500541829947
},
{
"filename": "src/core/helpers/type-analyzer/index.ts",
"retrieved_chunk": " this.analyzedTypes.forEach(type => {\n const oldTextLength = type.text.length;\n type.text = type.text.replace(/^[\\r\\n]+/, '');\n const startLineBreakCount = oldTextLength - type.text.length;\n type.text = type.text.replace(/[\\r\\n]+$/, '');\n const endLineBreakCount = oldTextLength - startLineBreakCount - type.text.length;\n type.range.pos += startLineBreakCount;\n type.range.end -= endLineBreakCount;\n });\n }",
"score": 12.061634683405618
},
{
"filename": "src/core/editor-context.ts",
"retrieved_chunk": "import vscode from 'vscode';\nimport { type AnalyzedType, TypeAnalyzer } from './helpers/type-analyzer';\nimport { debounce, isEqual } from 'lodash-es';\nimport { GlobalState } from './global-state';\nimport { Config } from './config';\ntype FoldingRange = Record<'start' | 'end', /* lineNumber */ number>;\ninterface EditorInfo {\n code: string;\n analyzedTypes: AnalyzedType[];\n isTSX: boolean;",
"score": 11.439999960313823
}
] | typescript | TYPE_KIND.DECLARE_STATEMENT
},
{ |
import { describe, expect, it } from 'vitest';
import { TypeAnalyzer } from '.';
import { TYPE_KIND } from './constants';
describe('function', () => {
it('overloading', () => {
const analyzer = new TypeAnalyzer(`
const t = 1
function a<B extends 222>(): void;
function b<A>(o: A): string;
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 48 },
text: 'function a<B extends 222>(): void;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 49, end: 77 },
text: 'function b<A>(o: A): string;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
}
]);
});
it('function-generic-definition - a`<B extends ...>`()', () => {
const analyzer = new TypeAnalyzer(
`
function a<B extends 111, C extends 111>() {}
const b = <B extends 222, C extends 222>() => {};
const c = function<B extends 333, C extends 333>() {}
const d = {
a<B extends 444, C extends 444>() {}
}
`
);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 11, end: 41 },
text: '<B extends 111, C extends 111>',
kind: TYPE_KIND.FUNCTION_GENERIC_DEFINITION
},
{
range: { pos: 57, end: 87 },
text: '<B extends 222, C extends 222>',
kind: TYPE_KIND.FUNCTION_GENERIC_DEFINITION
},
{
range: { pos: 115, end: 145 },
text: '<B extends 333, C extends 333>',
kind: TYPE_KIND.FUNCTION_GENERIC_DEFINITION
},
{
range: { pos: 166, end: 196 },
text: '<B extends 444, C extends 444>',
kind: TYPE_KIND.FUNCTION_GENERIC_DEFINITION
}
]);
});
it('function-parameter - (`a: number, b: string, ...`)', () => {
const analyzer = new TypeAnalyzer(`
function a(a1: A111, a2?: A222) {}
const b = (b1: B111, b2?: B222) => {};
const c = function(c1: C111, c2?: C222) {}
const d = {
e(d1: E111, d2?: E222) {}
f: (f1: F111, f2?: F222) => {}
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 20 },
text: ': A111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 24, end: 31 },
text: '?: A222',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 49, end: 55 },
text: ': B111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 59, end: 66 },
text: '?: B222',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 96, end: 102 },
text: ': C111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 106, end: 113 },
text: '?: C222',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 136, end: 142 },
text: ': E111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 146, end: 153 },
text: '?: E222',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 166, end: 172 },
text: ': F111',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 176, end: 183 },
text: '?: F222',
kind: TYPE_KIND.FUNCTION_PARAMETER
}
]);
});
it('function-return - ()`: number`', () => {
const analyzer = new TypeAnalyzer(`n
function a(): A111 {}
const b = (): B111 => {};
const c = function(): C111 {}
const d = {
d(): D111 {}
e: (): E111 => {}
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 20 },
text: ': A111',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 36, end: 42 },
text: ': B111',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 70, end: 76 },
text: ': C111',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 97, end: 103 },
text: ': D111',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 114, end: 120 },
text: ': E111',
kind: TYPE_KIND.FUNCTION_RETURN
}
]);
});
it('function-type-predicate - (a: any)`: asserts a is ...)`', () => {
const analyzer = new TypeAnalyzer(`
function a(value): asserts a is aaa {}
const b = (value): asserts b is bbb => {};
const c = function (value): asserts d is ddd {};
const d = {
e(value): asserts e is eee {},
f: (value): asserts f is fff => {}
};
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 18, end: 36 },
text: ': asserts a is aaa',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
},
{
range: { pos: 58, end: 76 },
text: ': asserts b is bbb',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
},
{
range: { pos: 111, end: 129 },
text: ': asserts d is ddd',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
},
{
range: { pos: 157, end: 175 },
text: ': asserts e is eee',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
},
{
range: { pos: 192, end: 210 },
text: ': asserts f is fff',
kind: TYPE_KIND.FUNCTION_TYPE_PREDICATE
}
]);
});
});
it('interface', () => {
const analyzer = new TypeAnalyzer(`
interface t {};
interface A111 {
a: number;
b: string;
c: {
e: 1
}
}`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 1, end: 15 },
text: 'interface t {}',
kind: TYPE_KIND.INTERFACE
},
{
range: { pos: 17, end: 81 },
text: 'interface A111 {\n a: number;\n b: string;\n c: {\n e: 1\n }\n}',
kind: TYPE_KIND.INTERFACE
}
]);
});
it('type alias', () => {
const analyzer = new TypeAnalyzer(`
type t = number;
type A111 = {
a: number;
} | 123 & {}`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 1, end: 17 },
text: 'type t = number;',
kind: TYPE_KIND.TYPE_ALIAS
},
{
range: { pos: 18, end: 58 },
text: 'type A111 = {\n a: number;\n} | 123 & {}',
kind: TYPE_KIND.TYPE_ALIAS
}
]);
});
it('variable type definition', () => {
const analyzer = new TypeAnalyzer(`
const a = 1;
declare const b: number, c: string;
const d: number, e: string;
const eee: null | string = ''
let fff!: string = ''
using ggg: usingAny = fn();
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 49 },
text: 'declare const b: number, c: string;',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 57, end: 65 },
text: ': number',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
},
{
range: { pos: 68, end: 76 },
text: ': string',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
},
{
range: { pos: 87, end: 102 },
text: ': null | string',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
},
{
range: { pos: 115, end: 124 },
text: '!: string',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
},
{
range: { pos: 139, end: 149 },
text: ': usingAny',
kind: TYPE_KIND.VARIABLE_TYPE_DEFINITION
}
]);
});
it('declare statement', () => {
const analyzer = new TypeAnalyzer(`
declare const a: number;
declare function b(): number;
declare class c {}
declare module d {}
declare namespace e {}
declare enum f {}
declare global {}
declare module 'g' {}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 1, end: 25 },
text: 'declare const a: number;',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 26, end: 55 },
text: 'declare function b(): number;',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 56, end: 74 },
text: 'declare class c {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 75, end: 94 },
text: 'declare module d {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 95, end: 117 },
text: 'declare namespace e {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 118, end: 135 },
text: 'declare enum f {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 136, end: 153 },
text: 'declare global {}',
kind: TYPE_KIND.DECLARE_STATEMENT
},
{
range: { pos: 154, end: 175 },
text: "declare module 'g' {}",
kind: TYPE_KIND.DECLARE_STATEMENT
}
]);
});
it('as expression', () => {
const analyzer = new TypeAnalyzer(`
const a = 1 as number;
const b = 1 as number | string;
const c = 1 as number | string | null as 111 as 3;
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 12, end: 22 },
text: ' as number',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 35, end: 54 },
text: ' as number | string',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 67, end: 93 },
text: ' as number | string | null',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 93, end: 100 },
text: ' as 111',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 100, end: 105 },
text: ' as 3',
kind: TYPE_KIND.AS_ASSERTION
}
]);
});
it('satisfies expression', () => {
const analyzer = new TypeAnalyzer(`
const a = 1 satisfies number;
const b = 1 satisfies number | string;
const c = 1 satisfies number | string | null;
const d = () => {
return 333 satisfies any
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 12, end: 29 },
text: ' satisfies number',
kind: TYPE_KIND.SATISFIES_OPERATOR
},
{
range: { pos: 42, end: 68 },
text: ' satisfies number | string',
kind: TYPE_KIND.SATISFIES_OPERATOR
},
{
range: { pos: 81, end: 114 },
text: ' satisfies number | string | null',
kind: TYPE_KIND.SATISFIES_OPERATOR
},
{
range: { pos: 147, end: 161 },
text: ' satisfies any',
kind: TYPE_KIND.SATISFIES_OPERATOR
}
]);
});
it('satisfies & as', () => {
const analyzer = new TypeAnalyzer(`
const a = {} satisfies {} as const;
const b = {} as const satisfies {};
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
kind: TYPE_KIND.SATISFIES_OPERATOR,
range: { pos: 13, end: 26 },
text: ' satisfies {}'
},
{
kind: TYPE_KIND.AS_ASSERTION,
range: { pos: 26, end: 35 },
text: ' as const'
},
{
kind: TYPE_KIND.AS_ASSERTION,
range: { pos: 49, end: 58 },
text: ' as const'
},
{
kind: TYPE_KIND.SATISFIES_OPERATOR,
range: { pos: 58, end: 71 },
text: ' satisfies {}'
}
]);
});
it('type assertion', () => {
const analyzer = new TypeAnalyzer(`
const a =<number>1;
const b = <number | string>1;
const c = <number | string | null>1;
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 10, end: 18 },
text: '<number>',
| kind: TYPE_KIND.ANGLE_BRACKETS_ASSERTION
},
{ |
range: { pos: 31, end: 48 },
text: '<number | string>',
kind: TYPE_KIND.ANGLE_BRACKETS_ASSERTION
},
{
range: { pos: 61, end: 85 },
text: '<number | string | null>',
kind: TYPE_KIND.ANGLE_BRACKETS_ASSERTION
}
]);
});
it('call expression', () => {
const analyzer = new TypeAnalyzer(`
b<number>();
new d<number, string>();
f<number, string, null>();
new Set<PersistListener<S>>()
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 2, end: 10 },
text: '<number>',
kind: TYPE_KIND.FUNCTION_CALL_GENERIC
},
{
range: { pos: 19, end: 35 },
text: '<number, string>',
kind: TYPE_KIND.FUNCTION_CALL_GENERIC
},
{
range: { pos: 40, end: 62 },
text: '<number, string, null>',
kind: TYPE_KIND.FUNCTION_CALL_GENERIC
},
{
range: { end: 93, pos: 73 },
text: '<PersistListener<S>>',
kind: TYPE_KIND.FUNCTION_CALL_GENERIC
}
]);
});
describe('class', () => {
it('property type definition', () => {
const analyzer = new TypeAnalyzer(`
class A {
a: number;
public b: string;
protected c: {
e: 1
}
private d: () => void = () => {}
e!: boolean;
g?: string;
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 14, end: 22 },
text: ': number',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { pos: 34, end: 42 },
text: ': string',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { pos: 57, end: 73 },
text: ': {\n e: 1\n }',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { pos: 85, end: 97 },
text: ': () => void',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { pos: 112, end: 122 },
text: '!: boolean',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
},
{
range: { end: 136, pos: 127 },
text: '?: string',
kind: TYPE_KIND.CLASS_PROPERTY_TYPE_DEFINITION
}
]);
});
it('method declaration', () => {
const analyzer = new TypeAnalyzer(`
class A {
public a(p: 1): boolean;
public a(p: 2): number;
public a(p: 1 | 2): boolean | number {
return '' as any;
}
public b(a: number): string;
protected c(b: number | 1): {
e: 1
}
protected get compileUtils(): any | 'compileUtils' {
const abc = {
getConfig: (): ReadonlyDeep<InnerCompilerConfig> => {
return getCurrentCompileConfig() as any as unknown;
},
b(): void {}
}
}
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 11, end: 37 },
text: ' public a(p: 1): boolean;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 38, end: 63 },
text: ' public a(p: 2): number;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 76, end: 83 },
text: ': 1 | 2',
kind: TYPE_KIND.FUNCTION_PARAMETER
},
{
range: { pos: 84, end: 102 },
text: ': boolean | number',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 118, end: 125 },
text: ' as any',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 131, end: 161 },
text: ' public b(a: number): string;',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 162, end: 206 },
text: ' protected c(b: number | 1): {\n e: 1\n }',
kind: TYPE_KIND.FUNCTION_OVERLOAD
},
{
range: { pos: 237, end: 259 },
text: ": any | 'compileUtils'",
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 299, end: 334 },
text: ': ReadonlyDeep<InnerCompilerConfig>',
kind: TYPE_KIND.FUNCTION_RETURN
},
{
range: { pos: 380, end: 387 },
text: ' as any',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 387, end: 398 },
text: ' as unknown',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 418, end: 424 },
text: ': void',
kind: TYPE_KIND.FUNCTION_RETURN
}
]);
});
it('constructor', () => {
const analyzer = new TypeAnalyzer(`
class A {
constructor(a: number) {}
}
`);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 26, end: 34 },
text: ': number',
kind: TYPE_KIND.FUNCTION_PARAMETER
}
]);
});
});
describe('tsx', () => {
it('generic arguments', () => {
const analyzer = new TypeAnalyzer(
`
const a = <Component<number> />
const b = <A<number, string> />
const c = <A<number, string, null> />
const d = <A
<number, string, null, 1, 2 | 3, [22]>
/>
`,
true
);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 23, end: 31 },
text: '<number>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
},
{
range: { pos: 49, end: 65 },
text: '<number, string>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
},
{
range: { pos: 83, end: 105 },
text: '<number, string, null>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
},
{
range: { pos: 128, end: 166 },
text: '<number, string, null, 1, 2 | 3, [22]>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
}
]);
});
it('integration', () => {
const analyzer = new TypeAnalyzer(
`
const a = <Component<number>
name
test={111 as any}
t2={\`...\${11 as string}\`}
{...test as object}
/>
`,
true
);
analyzer.analyze();
expect(analyzer.analyzedTypes).toMatchObject([
{
range: { pos: 23, end: 31 },
text: '<number>',
kind: TYPE_KIND.TSX_COMPONENT_GENERIC
},
{
range: { pos: 58, end: 65 },
text: ' as any',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 85, end: 95 },
text: ' as string',
kind: TYPE_KIND.AS_ASSERTION
},
{
range: { pos: 113, end: 123 },
text: ' as object',
kind: TYPE_KIND.AS_ASSERTION
}
]);
});
});
| src/core/helpers/type-analyzer/index.test.ts | xlboy-ts-type-hidden-a749a29 | [
{
"filename": "src/core/helpers/type-analyzer/index.ts",
"retrieved_chunk": " ]);\n }\n }\n }\n private pushAnalyzedType(\n kind: AnalyzedType['kind'],\n range: [pos: number, end: number]\n ) {\n const [pos, end] = range;\n const text = this.sourceFile.text.slice(pos, end);",
"score": 18.892185382181598
},
{
"filename": "src/core/helpers/type-analyzer/index.ts",
"retrieved_chunk": " this.analyzedTypes.push({ kind, range: { pos, end }, text });\n }\n}",
"score": 14.79500541829947
},
{
"filename": "src/core/helpers/type-analyzer/index.ts",
"retrieved_chunk": " return this.pushAnalyzedType(TYPE_KIND.ANGLE_BRACKETS_ASSERTION, [\n prevNode.end - 1,\n nextNode.pos + 1\n ]);\n }\n // context = `a as number` | `a satisfies number`, curChild = `number`\n function handleParentAsOrSatisfiesExpr(\n this: TypeAnalyzer,\n parent: ts.AsExpression | ts.SatisfiesExpression,\n curChild: ts.Node",
"score": 12.965709724192605
},
{
"filename": "src/core/editor-context.ts",
"retrieved_chunk": "import vscode from 'vscode';\nimport { type AnalyzedType, TypeAnalyzer } from './helpers/type-analyzer';\nimport { debounce, isEqual } from 'lodash-es';\nimport { GlobalState } from './global-state';\nimport { Config } from './config';\ntype FoldingRange = Record<'start' | 'end', /* lineNumber */ number>;\ninterface EditorInfo {\n code: string;\n analyzedTypes: AnalyzedType[];\n isTSX: boolean;",
"score": 12.68704919798575
},
{
"filename": "src/core/helpers/type-analyzer/index.ts",
"retrieved_chunk": " this.analyzedTypes.forEach(type => {\n const oldTextLength = type.text.length;\n type.text = type.text.replace(/^[\\r\\n]+/, '');\n const startLineBreakCount = oldTextLength - type.text.length;\n type.text = type.text.replace(/[\\r\\n]+$/, '');\n const endLineBreakCount = oldTextLength - startLineBreakCount - type.text.length;\n type.range.pos += startLineBreakCount;\n type.range.end -= endLineBreakCount;\n });\n }",
"score": 12.061634683405618
}
] | typescript | kind: TYPE_KIND.ANGLE_BRACKETS_ASSERTION
},
{ |
import type { ReadonlyDeep } from 'type-fest';
import vscode from 'vscode';
import fs from 'fs-extra';
import { log } from './log';
import { TYPE_KIND } from './helpers/type-analyzer/constants';
interface ExtensionConfig {
/** @default true */
enabled: boolean;
/** @default `{$ExtensionRootPath}/res/type-icon.png` */
typeIconPath: string;
/** @default [] */
ignoreTypeKinds: TYPE_KIND[];
}
const defaultTypeIconPath = `${__dirname}/../res/type-icon.png`;
export class Config {
private static _instance: Config;
/** instance */
static get i(): Config {
return (Config._instance ??= new Config());
}
get(): ReadonlyDeep<ExtensionConfig> {
return Object.freeze(this.config);
}
private sync() {
const config = vscode.workspace.getConfiguration('ts-type-hidden');
this.config = {
enabled: config.get('enabled', true),
typeIconPath: config.get('typeIconPath') || defaultTypeIconPath,
ignoreTypeKinds: config.get('ignoreTypeKinds', [])
} satisfies ExtensionConfig;
}
private config!: ExtensionConfig;
private watchCallbacks: Array<Function> = [];
private constructor() {
this.sync();
this.verify();
this.watch();
}
update() {
this.sync();
| log.appendLine(`Config updated:
${JSON.stringify(this.config, null, 2)} |
`);
}
registerWatchCallback(fn: Function) {
this.watchCallbacks.push(fn);
}
private verify() {
if (!fs.existsSync(this.config.typeIconPath)) {
vscode.window.showErrorMessage(
'[ts-type-hidden configuration]: \n`typeIconPath` is not a valid path'
);
this.config.typeIconPath = defaultTypeIconPath;
}
for (let i = this.config.ignoreTypeKinds.length - 1; i >= 0; i--) {
const typeKindToIgnore = this.config.ignoreTypeKinds[i];
const isInvalid = !Object.values(TYPE_KIND).includes(typeKindToIgnore);
if (isInvalid) {
this.config.ignoreTypeKinds.splice(i, 1);
vscode.window.showErrorMessage(
`[ts-type-hidden configuration]: \n\`ignoreTypeKinds.${typeKindToIgnore}\` is not a valid value`
);
}
}
}
private watch() {
vscode.workspace.onDidChangeConfiguration(() => {
this.update();
this.verify();
this.watchCallbacks.forEach(cb => cb());
});
}
}
| src/core/config.ts | xlboy-ts-type-hidden-a749a29 | [
{
"filename": "src/core/global-state.ts",
"retrieved_chunk": " public static init(vscodeContext: vscode.ExtensionContext) {\n GlobalState._instance = new GlobalState(vscodeContext);\n }\n private constructor(private readonly vscodeContext: vscode.ExtensionContext) {}\n get isHiddenMode() {\n return this.vscodeContext.globalState.get('isHiddenMode', true);\n }\n set isHiddenMode(value: boolean) {\n this.vscodeContext.globalState.update('isHiddenMode', value);\n }",
"score": 17.058216720169746
},
{
"filename": "src/core/editor-context.ts",
"retrieved_chunk": " }\n public static init() {\n EditorContext._instance = new EditorContext();\n }\n private editors = new Map</* filePath */ string, EditorInfo>();\n private curFocusedTypes: AnalyzedType[] = [];\n private constructor() {\n this.register();\n this.initVisibleEditors();\n this.decoration.init();",
"score": 16.681161619507584
},
{
"filename": "src/core/helpers/type-analyzer/index.ts",
"retrieved_chunk": " analyze() {\n this.visit(this.sourceFile, null);\n this.cleanAnalyzedTypes();\n return this.analyzedTypes;\n }\n private cleanAnalyzedTypes() {\n clearUselessTypes.call(this);\n clearLineBreakOfStartOrEnd.call(this);\n return;\n function clearLineBreakOfStartOrEnd(this: TypeAnalyzer) {",
"score": 14.40937087133836
},
{
"filename": "src/core/helpers/type-analyzer/index.ts",
"retrieved_chunk": " }\n });\n });\n const sortedToRemoveIndexs = Array.from(indexsToRemove).sort((a, b) => b - a);\n sortedToRemoveIndexs.forEach(index => this.analyzedTypes.splice(index, 1));\n }\n }\n private visit(node: ts.Node, parent: ts.Node | null) {\n if (\n ts.isTypeNode(node) ||",
"score": 13.998123700595311
},
{
"filename": "src/core/helpers/type-analyzer/index.ts",
"retrieved_chunk": " } else {\n ts.forEachChild(node, child => this.visit(child, node));\n }\n }\n private handleDifferentNode(parent: ts.Node, child: ts.Node) {\n type NodeHandlers = Partial<Record<ts.SyntaxKind, Function>>;\n const parentNodeHandlers: NodeHandlers = {\n [ts.SyntaxKind.FunctionDeclaration]: handleParentFunction.bind(this),\n [ts.SyntaxKind.MethodDeclaration]: handleParentFunction.bind(this),\n [ts.SyntaxKind.FunctionExpression]: handleParentFunction.bind(this),",
"score": 12.061249931037286
}
] | typescript | log.appendLine(`Config updated:
${JSON.stringify(this.config, null, 2)} |
import chalk from 'chalk';
import { stdout as output } from 'node:process';
import { OpenAIEmbeddings } from 'langchain/embeddings/openai';
import { HNSWLib } from 'langchain/vectorstores/hnswlib';
import { JSONLoader } from 'langchain/document_loaders/fs/json';
import { TextLoader } from 'langchain/document_loaders/fs/text';
import { PDFLoader } from 'langchain/document_loaders/fs/pdf';
import { DocxLoader } from 'langchain/document_loaders/fs/docx';
import { EPubLoader } from 'langchain/document_loaders/fs/epub';
import { CSVLoader } from 'langchain/document_loaders/fs/csv';
import ora from 'ora';
import { MarkdownTextSplitter, RecursiveCharacterTextSplitter } from 'langchain/text_splitter';
import { Document } from 'langchain/document';
import path from 'path';
import { YoutubeTranscript } from 'youtube-transcript';
import getDirectoryListWithDetails from '../utils/getDirectoryListWithDetails.js';
import createDirectory from '../utils/createDirectory.js';
import { getConfig, getDefaultOraOptions, getProjectRoot, setCurrentVectorStoreDatabasePath } from '../config/index.js';
import getDirectoryFiles from '../utils/getDirectoryFiles.js';
import WebCrawler from './crawler.js';
const projectRootDir = getProjectRoot();
const defaultOraOptions = getDefaultOraOptions(output);
/**
* This function loads and splits a file based on its extension using different loaders and text
* splitters.
* @param {string} filePath - A string representing the path to the file that needs to be loaded and
* split into documents.
* @returns The function `loadAndSplitFile` returns a Promise that resolves to an array of `Document`
* objects, where each `Document` represents a split portion of the input file. The type of the
* `Document` object is `Document<Record<string, unknown>>`, which means it has a generic type
* parameter that is an object with string keys and unknown values.
*/
async function loadAndSplitFile(filePath: string): Promise<Document<Record<string, unknown>>[]> {
const fileExtension = path.extname(filePath);
let loader;
let documents: Document<Record<string, unknown>>[];
switch (fileExtension) {
case '.json':
loader = new JSONLoader(filePath);
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
case '.txt':
loader = new TextLoader(filePath);
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
case '.md':
loader = new TextLoader(filePath);
documents = await loader.loadAndSplit(new MarkdownTextSplitter());
break;
case '.pdf':
loader = new PDFLoader(filePath, { splitPages: false });
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
case '.docx':
loader = new DocxLoader(filePath);
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
case '.csv':
loader = new CSVLoader(filePath);
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
case '.epub':
loader = new EPubLoader(filePath, { splitChapters: false });
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
default:
throw new Error(`Unsupported file extension: ${fileExtension}`);
}
return documents;
}
/**
* This function loads or creates a vector store using HNSWLib and OpenAIEmbeddings.
* @returns The function `loadOrCreateVectorStore` returns a Promise that resolves to an instance of
* the `HNSWLib` class, which is a vector store used for storing and searching high-dimensional
* vectors.
*/
async function loadOrCreateVectorStore(): Promise<HNSWLib> {
let vectorStore: HNSWLib;
let spinner;
| await createDirectory(getConfig().currentVectorStoreDatabasePath); |
const dbDirectory = getConfig().currentVectorStoreDatabasePath;
try {
vectorStore = await HNSWLib.load(dbDirectory, new OpenAIEmbeddings({ maxConcurrency: 5 }));
} catch {
spinner = ora({
...defaultOraOptions,
text: chalk.blue(`Creating new Context Vector Store in the ${dbDirectory} directory`),
}).start();
const docsDirectory = path.join(projectRootDir, process.env.DOCS_DIR || 'docs');
const filesToAdd = await getDirectoryFiles(docsDirectory);
const documents = await Promise.all(filesToAdd.map((filePath) => loadAndSplitFile(filePath)));
const flattenedDocuments = documents.reduce((acc, val) => acc.concat(val), []);
vectorStore = await HNSWLib.fromDocuments(flattenedDocuments, new OpenAIEmbeddings({ maxConcurrency: 5 }));
await vectorStore.save(dbDirectory);
spinner.succeed();
}
return vectorStore;
}
const contextVectorStore = await loadOrCreateVectorStore();
const contextWrapper = {
contextInstance: contextVectorStore,
};
/**
* This function loads or creates a new empty Context Vector Store using HNSWLib and OpenAIEmbeddings.
* @returns a Promise that resolves to an instance of the HNSWLib class, which represents a
* hierarchical navigable small world graph used for nearest neighbor search. The instance is either
* loaded from an existing directory or created as a new empty Context Vector Store with specified
* parameters.
*/
async function loadOrCreateEmptyVectorStore(subDirectory: string): Promise<HNSWLib> {
let vectorStore: HNSWLib;
let spinner;
const newContextVectorStorePath = path.join(projectRootDir, process.env.VECTOR_STORE_BASE_DIR || 'db', subDirectory);
await createDirectory(newContextVectorStorePath);
setCurrentVectorStoreDatabasePath(newContextVectorStorePath);
const dbDirectory = getConfig().currentVectorStoreDatabasePath;
try {
vectorStore = await HNSWLib.load(dbDirectory, new OpenAIEmbeddings({ maxConcurrency: 5 }));
output.write(chalk.blue(`Using Context Vector Store in the ${dbDirectory} directory\n`));
} catch {
spinner = ora({
...defaultOraOptions,
text: chalk.blue(`Creating new empty Context Vector Store in the ${dbDirectory} directory`),
}).start();
vectorStore = new HNSWLib(new OpenAIEmbeddings({ maxConcurrency: 5 }), {
space: 'cosine',
numDimensions: 1536,
});
spinner.succeed();
output.write(
chalk.red.bold(
`\nThe Context Vector Store is currently empty and unsaved, add context to is using \`/add-docs\`, \`/add-url\` or \`/add-youtube\``
)
);
}
contextWrapper.contextInstance = vectorStore;
return vectorStore;
}
async function getContextVectorStore() {
return contextWrapper.contextInstance;
}
/**
* This function adds documents to a context vector store and saves them.
* @param {string[]} filePaths - The `filePaths` parameter is an array of strings representing the file
* paths of the documents that need to be added to the Context Vector Store.
* @returns nothing (`undefined`).
*/
async function addDocument(filePaths: string[]) {
let spinner;
const dbDirectory = getConfig().currentVectorStoreDatabasePath;
try {
spinner = ora({ ...defaultOraOptions, text: `Adding files to the Context Vector Store` }).start();
const docsDirectory = path.join(projectRootDir, process.env.DOCS_DIR || 'docs');
const documents = await Promise.all(
filePaths.map((filePath) => loadAndSplitFile(path.join(docsDirectory, filePath)))
);
const flattenedDocuments = documents.reduce((acc, val) => acc.concat(val), []);
const vectorStore = await getContextVectorStore();
await vectorStore.addDocuments(flattenedDocuments);
await vectorStore.save(dbDirectory);
spinner.succeed();
return;
} catch (error) {
if (spinner) {
spinner.fail(chalk.red(error));
} else {
output.write(chalk.red(error));
}
}
}
/**
* The function adds a YouTube video transcript to a Context Vector Store.
* @param {string} URLOrVideoID - The URLOrVideoID parameter is a string that represents either the URL
* or the video ID of a YouTube video.
* @returns Nothing is being returned explicitly in the code, but the function is expected to return
* undefined after completing its execution.
*/
async function addYouTube(URLOrVideoID: string) {
let spinner;
const dbDirectory = getConfig().currentVectorStoreDatabasePath;
try {
spinner = ora({
...defaultOraOptions,
text: `Adding Video transcript from ${URLOrVideoID} to the Context Vector Store`,
}).start();
const transcript = await YoutubeTranscript.fetchTranscript(URLOrVideoID);
const text = transcript.map((part) => part.text).join(' ');
const splitter = new RecursiveCharacterTextSplitter();
const videoDocs = await splitter.splitDocuments([
new Document({
pageContent: text,
}),
]);
const vectorStore = await getContextVectorStore();
await vectorStore.addDocuments(videoDocs);
await vectorStore.save(dbDirectory);
spinner.succeed();
return;
} catch (error) {
if (spinner) {
spinner.fail(chalk.red(error));
} else {
output.write(chalk.red(error));
}
}
}
/**
* The function crawls a given URL, extracts text from the pages, splits the text into documents,
* generates embeddings for the documents, and saves them to a vector store.
* @param {string} URL - The URL of the website to crawl and extract text from.
* @param {string} selector - The selector parameter is a string that represents a CSS selector used to
* identify the HTML elements to be crawled on the web page. The WebCrawler will only crawl the
* elements that match the selector.
* @param {number} maxPages - The maximum number of pages to crawl for the given URL.
* @param {number} numberOfCharactersRequired - `numberOfCharactersRequired` is a number that specifies
* the minimum number of characters required for a document to be considered valid and used for
* generating embeddings. Any document with less than this number of characters will be discarded.
* @returns Nothing is being returned explicitly in the function, but it is implied that the function
* will return undefined if there are no errors.
*/
async function addURL(URL: string, selector: string, maxPages: number, numberOfCharactersRequired: number) {
const dbDirectory = getConfig().currentVectorStoreDatabasePath;
const addUrlSpinner = ora({ ...defaultOraOptions, text: `Crawling ${URL}` });
let documents;
try {
addUrlSpinner.start();
const progressCallback = (linksFound: number, linksCrawled: number, currentUrl: string) => {
addUrlSpinner.text = `Links found: ${linksFound} - Links crawled: ${linksCrawled} - Crawling ${currentUrl}`;
};
const crawler = new WebCrawler([URL], progressCallback, selector, maxPages, numberOfCharactersRequired);
const pages = (await crawler.start()) as Page[];
documents = await Promise.all(
pages.map((row) => {
const splitter = new RecursiveCharacterTextSplitter();
const webDocs = splitter.splitDocuments([
new Document({
pageContent: row.text,
}),
]);
return webDocs;
})
);
addUrlSpinner.succeed();
} catch (error) {
addUrlSpinner.fail(chalk.red(error));
}
if (documents) {
const generateEmbeddingsSpinner = ora({ ...defaultOraOptions, text: `Generating Embeddings` });
try {
const flattenedDocuments = documents.flat();
generateEmbeddingsSpinner.text = `Generating Embeddings for ${flattenedDocuments.length} documents`;
generateEmbeddingsSpinner.start();
const vectorStore = await getContextVectorStore();
await vectorStore.addDocuments(flattenedDocuments);
await vectorStore.save(dbDirectory);
generateEmbeddingsSpinner.succeed();
return;
} catch (error) {
generateEmbeddingsSpinner.fail(chalk.red(error));
}
}
}
async function listContextStores() {
const projectRoot = getProjectRoot(); // Please replace this with your actual function to get the project root
const vectorStoreDir = process.env.VECTOR_STORE_BASE_DIR || 'db';
const targetDir = path.join(projectRoot, vectorStoreDir);
const contextVectorStoresList = await getDirectoryListWithDetails(targetDir);
output.write(chalk.blue(`Context Vector Stores in ${targetDir}:\n\n`));
Object.entries(contextVectorStoresList).forEach(([dir, files]) => {
output.write(chalk.yellow(`Directory: ${dir}`));
if (dir === getConfig().currentVectorStoreDatabasePath) {
output.write(chalk.green(` (Currently selected)`));
}
output.write('\n');
files.forEach((file) => {
output.write(chalk.yellow(` File: ${file.name}, Size: ${file.size} KB\n`));
});
});
}
export { getContextVectorStore, addDocument, addURL, addYouTube, listContextStores, loadOrCreateEmptyVectorStore };
| src/lib/contextManager.ts | gmickel-memorybot-bad0302 | [
{
"filename": "src/lib/vectorStoreUtils.ts",
"retrieved_chunk": "import { HNSWLib } from 'langchain/vectorstores/hnswlib';\n/**\n * Retrieves relevant context for the given question by performing a similarity search on the provided vector store.\n * @param {HNSWLib} vectorStore - HNSWLib is a library for approximate nearest neighbor search, used to\n * search for similar vectors in a high-dimensional space.\n * @param {string} sanitizedQuestion - The sanitized version of the question that needs to be answered.\n * It is a string input.\n * @param {number} numDocuments - The `numDocuments` parameter is the number of documents that the\n * `getRelevantContext` function should retrieve from the `vectorStore` based on their similarity to\n * the `sanitizedQuestion`.",
"score": 51.81751590888221
},
{
"filename": "src/lib/vectorStoreUtils.ts",
"retrieved_chunk": " * @returns The function `getRelevantContext` is returning a Promise that resolves to a string. The\n * string is the concatenation of the `pageContent` property of the top `numDocuments` documents\n * returned by a similarity search performed on a `vectorStore` using the `sanitizedQuestion` as the\n * query. The resulting string is trimmed and all newline characters are replaced with spaces.\n */\nasync function getRelevantContext(\n vectorStore: HNSWLib,\n sanitizedQuestion: string,\n numDocuments: number\n): Promise<string> {",
"score": 49.96683933979949
},
{
"filename": "src/utils/resolveURL.ts",
"retrieved_chunk": "/**\n * The function resolves a URL from a given base URL and returns the resolved URL as a string.\n * @param {string} from - The `from` parameter is a string representing the base URL that the `to`\n * parameter will be resolved against. It can be an absolute or relative URL.\n * @param {string} to - The `to` parameter is a string representing the URL that needs to be resolved.\n * It can be an absolute URL or a relative URL.\n * @returns The function `resolve` returns a string that represents the resolved URL. If the `to`\n * parameter is a relative URL, the function returns a string that represents the resolved URL relative\n * to the `from` parameter. If the `to` parameter is an absolute URL, the function returns a string\n * that represents the resolved URL.",
"score": 39.81615780545772
},
{
"filename": "src/lib/memoryManager.ts",
"retrieved_chunk": "const memoryDirectory = path.join(projectRootDir, process.env.MEMORY_VECTOR_STORE_DIR || 'memory');\nlet memoryVectorStore: HNSWLib;\ntry {\n memoryVectorStore = await HNSWLib.load(memoryDirectory, new OpenAIEmbeddings());\n} catch {\n output.write(`${chalk.blue(`Creating a new memory vector store index in the ${memoryDirectory} directory`)}\\n`);\n memoryVectorStore = new HNSWLib(new OpenAIEmbeddings(), {\n space: 'cosine',\n numDimensions: 1536,\n });",
"score": 38.68883760244363
},
{
"filename": "src/commands/command.ts",
"retrieved_chunk": "/**\n * The function creates a command object with a name, aliases, description, and an execute function\n * that returns a Promise.\n * @param {string} name - A string representing the name of the command.\n * @param {string[]} aliases - An array of alternative names that can be used to call the command. For\n * example, if the command is named \"help\", aliases could include \"h\" or \"info\".\n * @param {string} description - A brief description of what the command does.\n * @param execute - The `execute` parameter is a function that takes in three arguments:\n * @returns A `Command` object is being returned.\n */",
"score": 38.165202652697936
}
] | typescript | await createDirectory(getConfig().currentVectorStoreDatabasePath); |
import chalk from 'chalk';
import { stdout as output } from 'node:process';
import { OpenAIEmbeddings } from 'langchain/embeddings/openai';
import { HNSWLib } from 'langchain/vectorstores/hnswlib';
import { JSONLoader } from 'langchain/document_loaders/fs/json';
import { TextLoader } from 'langchain/document_loaders/fs/text';
import { PDFLoader } from 'langchain/document_loaders/fs/pdf';
import { DocxLoader } from 'langchain/document_loaders/fs/docx';
import { EPubLoader } from 'langchain/document_loaders/fs/epub';
import { CSVLoader } from 'langchain/document_loaders/fs/csv';
import ora from 'ora';
import { MarkdownTextSplitter, RecursiveCharacterTextSplitter } from 'langchain/text_splitter';
import { Document } from 'langchain/document';
import path from 'path';
import { YoutubeTranscript } from 'youtube-transcript';
import getDirectoryListWithDetails from '../utils/getDirectoryListWithDetails.js';
import createDirectory from '../utils/createDirectory.js';
import { getConfig, getDefaultOraOptions, getProjectRoot, setCurrentVectorStoreDatabasePath } from '../config/index.js';
import getDirectoryFiles from '../utils/getDirectoryFiles.js';
import WebCrawler from './crawler.js';
const projectRootDir = getProjectRoot();
const defaultOraOptions = getDefaultOraOptions(output);
/**
* This function loads and splits a file based on its extension using different loaders and text
* splitters.
* @param {string} filePath - A string representing the path to the file that needs to be loaded and
* split into documents.
* @returns The function `loadAndSplitFile` returns a Promise that resolves to an array of `Document`
* objects, where each `Document` represents a split portion of the input file. The type of the
* `Document` object is `Document<Record<string, unknown>>`, which means it has a generic type
* parameter that is an object with string keys and unknown values.
*/
async function loadAndSplitFile(filePath: string): Promise<Document<Record<string, unknown>>[]> {
const fileExtension = path.extname(filePath);
let loader;
let documents: Document<Record<string, unknown>>[];
switch (fileExtension) {
case '.json':
loader = new JSONLoader(filePath);
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
case '.txt':
loader = new TextLoader(filePath);
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
case '.md':
loader = new TextLoader(filePath);
documents = await loader.loadAndSplit(new MarkdownTextSplitter());
break;
case '.pdf':
loader = new PDFLoader(filePath, { splitPages: false });
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
case '.docx':
loader = new DocxLoader(filePath);
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
case '.csv':
loader = new CSVLoader(filePath);
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
case '.epub':
loader = new EPubLoader(filePath, { splitChapters: false });
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
default:
throw new Error(`Unsupported file extension: ${fileExtension}`);
}
return documents;
}
/**
* This function loads or creates a vector store using HNSWLib and OpenAIEmbeddings.
* @returns The function `loadOrCreateVectorStore` returns a Promise that resolves to an instance of
* the `HNSWLib` class, which is a vector store used for storing and searching high-dimensional
* vectors.
*/
async function loadOrCreateVectorStore(): Promise<HNSWLib> {
let vectorStore: HNSWLib;
let spinner;
await createDirectory(getConfig().currentVectorStoreDatabasePath);
const dbDirectory = getConfig().currentVectorStoreDatabasePath;
try {
vectorStore = await HNSWLib.load(dbDirectory, new OpenAIEmbeddings({ maxConcurrency: 5 }));
} catch {
spinner = ora({
...defaultOraOptions,
text: chalk.blue(`Creating new Context Vector Store in the ${dbDirectory} directory`),
}).start();
const docsDirectory = path.join(projectRootDir, process.env.DOCS_DIR || 'docs');
| const filesToAdd = await getDirectoryFiles(docsDirectory); |
const documents = await Promise.all(filesToAdd.map((filePath) => loadAndSplitFile(filePath)));
const flattenedDocuments = documents.reduce((acc, val) => acc.concat(val), []);
vectorStore = await HNSWLib.fromDocuments(flattenedDocuments, new OpenAIEmbeddings({ maxConcurrency: 5 }));
await vectorStore.save(dbDirectory);
spinner.succeed();
}
return vectorStore;
}
const contextVectorStore = await loadOrCreateVectorStore();
const contextWrapper = {
contextInstance: contextVectorStore,
};
/**
* This function loads or creates a new empty Context Vector Store using HNSWLib and OpenAIEmbeddings.
* @returns a Promise that resolves to an instance of the HNSWLib class, which represents a
* hierarchical navigable small world graph used for nearest neighbor search. The instance is either
* loaded from an existing directory or created as a new empty Context Vector Store with specified
* parameters.
*/
async function loadOrCreateEmptyVectorStore(subDirectory: string): Promise<HNSWLib> {
let vectorStore: HNSWLib;
let spinner;
const newContextVectorStorePath = path.join(projectRootDir, process.env.VECTOR_STORE_BASE_DIR || 'db', subDirectory);
await createDirectory(newContextVectorStorePath);
setCurrentVectorStoreDatabasePath(newContextVectorStorePath);
const dbDirectory = getConfig().currentVectorStoreDatabasePath;
try {
vectorStore = await HNSWLib.load(dbDirectory, new OpenAIEmbeddings({ maxConcurrency: 5 }));
output.write(chalk.blue(`Using Context Vector Store in the ${dbDirectory} directory\n`));
} catch {
spinner = ora({
...defaultOraOptions,
text: chalk.blue(`Creating new empty Context Vector Store in the ${dbDirectory} directory`),
}).start();
vectorStore = new HNSWLib(new OpenAIEmbeddings({ maxConcurrency: 5 }), {
space: 'cosine',
numDimensions: 1536,
});
spinner.succeed();
output.write(
chalk.red.bold(
`\nThe Context Vector Store is currently empty and unsaved, add context to is using \`/add-docs\`, \`/add-url\` or \`/add-youtube\``
)
);
}
contextWrapper.contextInstance = vectorStore;
return vectorStore;
}
async function getContextVectorStore() {
return contextWrapper.contextInstance;
}
/**
* This function adds documents to a context vector store and saves them.
* @param {string[]} filePaths - The `filePaths` parameter is an array of strings representing the file
* paths of the documents that need to be added to the Context Vector Store.
* @returns nothing (`undefined`).
*/
async function addDocument(filePaths: string[]) {
let spinner;
const dbDirectory = getConfig().currentVectorStoreDatabasePath;
try {
spinner = ora({ ...defaultOraOptions, text: `Adding files to the Context Vector Store` }).start();
const docsDirectory = path.join(projectRootDir, process.env.DOCS_DIR || 'docs');
const documents = await Promise.all(
filePaths.map((filePath) => loadAndSplitFile(path.join(docsDirectory, filePath)))
);
const flattenedDocuments = documents.reduce((acc, val) => acc.concat(val), []);
const vectorStore = await getContextVectorStore();
await vectorStore.addDocuments(flattenedDocuments);
await vectorStore.save(dbDirectory);
spinner.succeed();
return;
} catch (error) {
if (spinner) {
spinner.fail(chalk.red(error));
} else {
output.write(chalk.red(error));
}
}
}
/**
* The function adds a YouTube video transcript to a Context Vector Store.
* @param {string} URLOrVideoID - The URLOrVideoID parameter is a string that represents either the URL
* or the video ID of a YouTube video.
* @returns Nothing is being returned explicitly in the code, but the function is expected to return
* undefined after completing its execution.
*/
async function addYouTube(URLOrVideoID: string) {
let spinner;
const dbDirectory = getConfig().currentVectorStoreDatabasePath;
try {
spinner = ora({
...defaultOraOptions,
text: `Adding Video transcript from ${URLOrVideoID} to the Context Vector Store`,
}).start();
const transcript = await YoutubeTranscript.fetchTranscript(URLOrVideoID);
const text = transcript.map((part) => part.text).join(' ');
const splitter = new RecursiveCharacterTextSplitter();
const videoDocs = await splitter.splitDocuments([
new Document({
pageContent: text,
}),
]);
const vectorStore = await getContextVectorStore();
await vectorStore.addDocuments(videoDocs);
await vectorStore.save(dbDirectory);
spinner.succeed();
return;
} catch (error) {
if (spinner) {
spinner.fail(chalk.red(error));
} else {
output.write(chalk.red(error));
}
}
}
/**
* The function crawls a given URL, extracts text from the pages, splits the text into documents,
* generates embeddings for the documents, and saves them to a vector store.
* @param {string} URL - The URL of the website to crawl and extract text from.
* @param {string} selector - The selector parameter is a string that represents a CSS selector used to
* identify the HTML elements to be crawled on the web page. The WebCrawler will only crawl the
* elements that match the selector.
* @param {number} maxPages - The maximum number of pages to crawl for the given URL.
* @param {number} numberOfCharactersRequired - `numberOfCharactersRequired` is a number that specifies
* the minimum number of characters required for a document to be considered valid and used for
* generating embeddings. Any document with less than this number of characters will be discarded.
* @returns Nothing is being returned explicitly in the function, but it is implied that the function
* will return undefined if there are no errors.
*/
async function addURL(URL: string, selector: string, maxPages: number, numberOfCharactersRequired: number) {
const dbDirectory = getConfig().currentVectorStoreDatabasePath;
const addUrlSpinner = ora({ ...defaultOraOptions, text: `Crawling ${URL}` });
let documents;
try {
addUrlSpinner.start();
const progressCallback = (linksFound: number, linksCrawled: number, currentUrl: string) => {
addUrlSpinner.text = `Links found: ${linksFound} - Links crawled: ${linksCrawled} - Crawling ${currentUrl}`;
};
const crawler = new WebCrawler([URL], progressCallback, selector, maxPages, numberOfCharactersRequired);
const pages = (await crawler.start()) as Page[];
documents = await Promise.all(
pages.map((row) => {
const splitter = new RecursiveCharacterTextSplitter();
const webDocs = splitter.splitDocuments([
new Document({
pageContent: row.text,
}),
]);
return webDocs;
})
);
addUrlSpinner.succeed();
} catch (error) {
addUrlSpinner.fail(chalk.red(error));
}
if (documents) {
const generateEmbeddingsSpinner = ora({ ...defaultOraOptions, text: `Generating Embeddings` });
try {
const flattenedDocuments = documents.flat();
generateEmbeddingsSpinner.text = `Generating Embeddings for ${flattenedDocuments.length} documents`;
generateEmbeddingsSpinner.start();
const vectorStore = await getContextVectorStore();
await vectorStore.addDocuments(flattenedDocuments);
await vectorStore.save(dbDirectory);
generateEmbeddingsSpinner.succeed();
return;
} catch (error) {
generateEmbeddingsSpinner.fail(chalk.red(error));
}
}
}
async function listContextStores() {
const projectRoot = getProjectRoot(); // Please replace this with your actual function to get the project root
const vectorStoreDir = process.env.VECTOR_STORE_BASE_DIR || 'db';
const targetDir = path.join(projectRoot, vectorStoreDir);
const contextVectorStoresList = await getDirectoryListWithDetails(targetDir);
output.write(chalk.blue(`Context Vector Stores in ${targetDir}:\n\n`));
Object.entries(contextVectorStoresList).forEach(([dir, files]) => {
output.write(chalk.yellow(`Directory: ${dir}`));
if (dir === getConfig().currentVectorStoreDatabasePath) {
output.write(chalk.green(` (Currently selected)`));
}
output.write('\n');
files.forEach((file) => {
output.write(chalk.yellow(` File: ${file.name}, Size: ${file.size} KB\n`));
});
});
}
export { getContextVectorStore, addDocument, addURL, addYouTube, listContextStores, loadOrCreateEmptyVectorStore };
| src/lib/contextManager.ts | gmickel-memorybot-bad0302 | [
{
"filename": "src/lib/memoryManager.ts",
"retrieved_chunk": "const memoryDirectory = path.join(projectRootDir, process.env.MEMORY_VECTOR_STORE_DIR || 'memory');\nlet memoryVectorStore: HNSWLib;\ntry {\n memoryVectorStore = await HNSWLib.load(memoryDirectory, new OpenAIEmbeddings());\n} catch {\n output.write(`${chalk.blue(`Creating a new memory vector store index in the ${memoryDirectory} directory`)}\\n`);\n memoryVectorStore = new HNSWLib(new OpenAIEmbeddings(), {\n space: 'cosine',\n numDimensions: 1536,\n });",
"score": 47.79846691227131
},
{
"filename": "src/lib/memoryManager.ts",
"retrieved_chunk": " }\n return chalk.red(`All files in the memory directory have been deleted: ${error}`);\n }\n}\nasync function resetMemoryVectorStore(onReset: (newMemoryVectorStore: HNSWLib) => void) {\n const newMemoryVectorStore = new HNSWLib(new OpenAIEmbeddings(), {\n space: 'cosine',\n numDimensions: 1536,\n });\n await deleteMemoryDirectory();",
"score": 22.553130268828504
},
{
"filename": "src/config/index.ts",
"retrieved_chunk": " return {\n text: 'Loading',\n stream: output,\n discardStdin: false,\n };\n}\nconst defaultConfig: Config = {\n currentVectorStoreDatabasePath: path.join(getProjectRoot(), process.env.VECTOR_STORE_DIR || 'db/default'),\n numContextDocumentsToRetrieve: 6,\n numMemoryDocumentsToRetrieve: 4,",
"score": 20.966497179559283
},
{
"filename": "src/lib/memoryManager.ts",
"retrieved_chunk": "}\nasync function deleteMemoryDirectory() {\n try {\n const files = await fs.readdir(memoryDirectory);\n const deletePromises = files.map((file) => fs.unlink(path.join(memoryDirectory, file)));\n await Promise.all(deletePromises);\n return `All files in the memory directory have been deleted.`;\n } catch (error) {\n if (error instanceof Error) {\n return chalk.red(`All files in the memory directory have been deleted: ${error.message}`);",
"score": 20.37458444226676
},
{
"filename": "src/index.ts",
"retrieved_chunk": "const llm = new OpenAIChat({\n streaming: true,\n callbackManager,\n modelName: process.env.MODEL || 'gpt-3.5-turbo',\n});\nconst systemPrompt = SystemMessagePromptTemplate.fromTemplate(oneLine`\n ${systemPromptTemplate}\n`);\nconst chatPrompt = ChatPromptTemplate.fromPromptMessages([\n systemPrompt,",
"score": 18.96770526870548
}
] | typescript | const filesToAdd = await getDirectoryFiles(docsDirectory); |
import chalk from 'chalk';
import { stdout as output } from 'node:process';
import { OpenAIEmbeddings } from 'langchain/embeddings/openai';
import { HNSWLib } from 'langchain/vectorstores/hnswlib';
import { JSONLoader } from 'langchain/document_loaders/fs/json';
import { TextLoader } from 'langchain/document_loaders/fs/text';
import { PDFLoader } from 'langchain/document_loaders/fs/pdf';
import { DocxLoader } from 'langchain/document_loaders/fs/docx';
import { EPubLoader } from 'langchain/document_loaders/fs/epub';
import { CSVLoader } from 'langchain/document_loaders/fs/csv';
import ora from 'ora';
import { MarkdownTextSplitter, RecursiveCharacterTextSplitter } from 'langchain/text_splitter';
import { Document } from 'langchain/document';
import path from 'path';
import { YoutubeTranscript } from 'youtube-transcript';
import getDirectoryListWithDetails from '../utils/getDirectoryListWithDetails.js';
import createDirectory from '../utils/createDirectory.js';
import { getConfig, getDefaultOraOptions, getProjectRoot, setCurrentVectorStoreDatabasePath } from '../config/index.js';
import getDirectoryFiles from '../utils/getDirectoryFiles.js';
import WebCrawler from './crawler.js';
const projectRootDir = getProjectRoot();
const defaultOraOptions = getDefaultOraOptions(output);
/**
* This function loads and splits a file based on its extension using different loaders and text
* splitters.
* @param {string} filePath - A string representing the path to the file that needs to be loaded and
* split into documents.
* @returns The function `loadAndSplitFile` returns a Promise that resolves to an array of `Document`
* objects, where each `Document` represents a split portion of the input file. The type of the
* `Document` object is `Document<Record<string, unknown>>`, which means it has a generic type
* parameter that is an object with string keys and unknown values.
*/
async function loadAndSplitFile(filePath: string): Promise<Document<Record<string, unknown>>[]> {
const fileExtension = path.extname(filePath);
let loader;
let documents: Document<Record<string, unknown>>[];
switch (fileExtension) {
case '.json':
loader = new JSONLoader(filePath);
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
case '.txt':
loader = new TextLoader(filePath);
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
case '.md':
loader = new TextLoader(filePath);
documents = await loader.loadAndSplit(new MarkdownTextSplitter());
break;
case '.pdf':
loader = new PDFLoader(filePath, { splitPages: false });
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
case '.docx':
loader = new DocxLoader(filePath);
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
case '.csv':
loader = new CSVLoader(filePath);
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
case '.epub':
loader = new EPubLoader(filePath, { splitChapters: false });
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
default:
throw new Error(`Unsupported file extension: ${fileExtension}`);
}
return documents;
}
/**
* This function loads or creates a vector store using HNSWLib and OpenAIEmbeddings.
* @returns The function `loadOrCreateVectorStore` returns a Promise that resolves to an instance of
* the `HNSWLib` class, which is a vector store used for storing and searching high-dimensional
* vectors.
*/
async function loadOrCreateVectorStore(): Promise<HNSWLib> {
let vectorStore: HNSWLib;
let spinner;
await createDirectory(getConfig().currentVectorStoreDatabasePath);
const dbDirectory = getConfig().currentVectorStoreDatabasePath;
try {
vectorStore = await HNSWLib.load(dbDirectory, new OpenAIEmbeddings({ maxConcurrency: 5 }));
} catch {
spinner = ora({
...defaultOraOptions,
text: chalk.blue(`Creating new Context Vector Store in the ${dbDirectory} directory`),
}).start();
const docsDirectory = path.join(projectRootDir, process.env.DOCS_DIR || 'docs');
const filesToAdd = await getDirectoryFiles(docsDirectory);
const documents = await Promise.all(filesToAdd.map((filePath) => loadAndSplitFile(filePath)));
const flattenedDocuments = documents.reduce((acc, val) => acc.concat(val), []);
vectorStore = await HNSWLib.fromDocuments(flattenedDocuments, new OpenAIEmbeddings({ maxConcurrency: 5 }));
await vectorStore.save(dbDirectory);
spinner.succeed();
}
return vectorStore;
}
const contextVectorStore = await loadOrCreateVectorStore();
const contextWrapper = {
contextInstance: contextVectorStore,
};
/**
* This function loads or creates a new empty Context Vector Store using HNSWLib and OpenAIEmbeddings.
* @returns a Promise that resolves to an instance of the HNSWLib class, which represents a
* hierarchical navigable small world graph used for nearest neighbor search. The instance is either
* loaded from an existing directory or created as a new empty Context Vector Store with specified
* parameters.
*/
async function loadOrCreateEmptyVectorStore(subDirectory: string): Promise<HNSWLib> {
let vectorStore: HNSWLib;
let spinner;
const newContextVectorStorePath = path.join(projectRootDir, process.env.VECTOR_STORE_BASE_DIR || 'db', subDirectory);
await createDirectory(newContextVectorStorePath);
setCurrentVectorStoreDatabasePath(newContextVectorStorePath);
const dbDirectory = getConfig().currentVectorStoreDatabasePath;
try {
vectorStore = await HNSWLib.load(dbDirectory, new OpenAIEmbeddings({ maxConcurrency: 5 }));
output.write(chalk.blue(`Using Context Vector Store in the ${dbDirectory} directory\n`));
} catch {
spinner = ora({
...defaultOraOptions,
text: chalk.blue(`Creating new empty Context Vector Store in the ${dbDirectory} directory`),
}).start();
vectorStore = new HNSWLib(new OpenAIEmbeddings({ maxConcurrency: 5 }), {
space: 'cosine',
numDimensions: 1536,
});
spinner.succeed();
output.write(
chalk.red.bold(
`\nThe Context Vector Store is currently empty and unsaved, add context to is using \`/add-docs\`, \`/add-url\` or \`/add-youtube\``
)
);
}
contextWrapper.contextInstance = vectorStore;
return vectorStore;
}
async function getContextVectorStore() {
return contextWrapper.contextInstance;
}
/**
* This function adds documents to a context vector store and saves them.
* @param {string[]} filePaths - The `filePaths` parameter is an array of strings representing the file
* paths of the documents that need to be added to the Context Vector Store.
* @returns nothing (`undefined`).
*/
async function addDocument(filePaths: string[]) {
let spinner;
const dbDirectory = getConfig().currentVectorStoreDatabasePath;
try {
spinner = ora({ ...defaultOraOptions, text: `Adding files to the Context Vector Store` }).start();
const docsDirectory = path.join(projectRootDir, process.env.DOCS_DIR || 'docs');
const documents = await Promise.all(
filePaths.map((filePath) => loadAndSplitFile(path.join(docsDirectory, filePath)))
);
const flattenedDocuments = documents.reduce((acc, val) => acc.concat(val), []);
const vectorStore = await getContextVectorStore();
await vectorStore.addDocuments(flattenedDocuments);
await vectorStore.save(dbDirectory);
spinner.succeed();
return;
} catch (error) {
if (spinner) {
spinner.fail(chalk.red(error));
} else {
output.write(chalk.red(error));
}
}
}
/**
* The function adds a YouTube video transcript to a Context Vector Store.
* @param {string} URLOrVideoID - The URLOrVideoID parameter is a string that represents either the URL
* or the video ID of a YouTube video.
* @returns Nothing is being returned explicitly in the code, but the function is expected to return
* undefined after completing its execution.
*/
async function addYouTube(URLOrVideoID: string) {
let spinner;
const dbDirectory = getConfig().currentVectorStoreDatabasePath;
try {
spinner = ora({
...defaultOraOptions,
text: `Adding Video transcript from ${URLOrVideoID} to the Context Vector Store`,
}).start();
const transcript = await YoutubeTranscript.fetchTranscript(URLOrVideoID);
const text = transcript.map((part) => part.text).join(' ');
const splitter = new RecursiveCharacterTextSplitter();
const videoDocs = await splitter.splitDocuments([
new Document({
pageContent: text,
}),
]);
const vectorStore = await getContextVectorStore();
await vectorStore.addDocuments(videoDocs);
await vectorStore.save(dbDirectory);
spinner.succeed();
return;
} catch (error) {
if (spinner) {
spinner.fail(chalk.red(error));
} else {
output.write(chalk.red(error));
}
}
}
/**
* The function crawls a given URL, extracts text from the pages, splits the text into documents,
* generates embeddings for the documents, and saves them to a vector store.
* @param {string} URL - The URL of the website to crawl and extract text from.
* @param {string} selector - The selector parameter is a string that represents a CSS selector used to
* identify the HTML elements to be crawled on the web page. The WebCrawler will only crawl the
* elements that match the selector.
* @param {number} maxPages - The maximum number of pages to crawl for the given URL.
* @param {number} numberOfCharactersRequired - `numberOfCharactersRequired` is a number that specifies
* the minimum number of characters required for a document to be considered valid and used for
* generating embeddings. Any document with less than this number of characters will be discarded.
* @returns Nothing is being returned explicitly in the function, but it is implied that the function
* will return undefined if there are no errors.
*/
async function addURL(URL: string, selector: string, maxPages: number, numberOfCharactersRequired: number) {
const dbDirectory = getConfig().currentVectorStoreDatabasePath;
const addUrlSpinner = ora({ ...defaultOraOptions, text: `Crawling ${URL}` });
let documents;
try {
addUrlSpinner.start();
const progressCallback = (linksFound: number, linksCrawled: number, currentUrl: string) => {
addUrlSpinner.text = `Links found: ${linksFound} - Links crawled: ${linksCrawled} - Crawling ${currentUrl}`;
};
const crawler = | new WebCrawler([URL], progressCallback, selector, maxPages, numberOfCharactersRequired); |
const pages = (await crawler.start()) as Page[];
documents = await Promise.all(
pages.map((row) => {
const splitter = new RecursiveCharacterTextSplitter();
const webDocs = splitter.splitDocuments([
new Document({
pageContent: row.text,
}),
]);
return webDocs;
})
);
addUrlSpinner.succeed();
} catch (error) {
addUrlSpinner.fail(chalk.red(error));
}
if (documents) {
const generateEmbeddingsSpinner = ora({ ...defaultOraOptions, text: `Generating Embeddings` });
try {
const flattenedDocuments = documents.flat();
generateEmbeddingsSpinner.text = `Generating Embeddings for ${flattenedDocuments.length} documents`;
generateEmbeddingsSpinner.start();
const vectorStore = await getContextVectorStore();
await vectorStore.addDocuments(flattenedDocuments);
await vectorStore.save(dbDirectory);
generateEmbeddingsSpinner.succeed();
return;
} catch (error) {
generateEmbeddingsSpinner.fail(chalk.red(error));
}
}
}
async function listContextStores() {
const projectRoot = getProjectRoot(); // Please replace this with your actual function to get the project root
const vectorStoreDir = process.env.VECTOR_STORE_BASE_DIR || 'db';
const targetDir = path.join(projectRoot, vectorStoreDir);
const contextVectorStoresList = await getDirectoryListWithDetails(targetDir);
output.write(chalk.blue(`Context Vector Stores in ${targetDir}:\n\n`));
Object.entries(contextVectorStoresList).forEach(([dir, files]) => {
output.write(chalk.yellow(`Directory: ${dir}`));
if (dir === getConfig().currentVectorStoreDatabasePath) {
output.write(chalk.green(` (Currently selected)`));
}
output.write('\n');
files.forEach((file) => {
output.write(chalk.yellow(` File: ${file.name}, Size: ${file.size} KB\n`));
});
});
}
export { getContextVectorStore, addDocument, addURL, addYouTube, listContextStores, loadOrCreateEmptyVectorStore };
| src/lib/contextManager.ts | gmickel-memorybot-bad0302 | [
{
"filename": "src/lib/crawler.ts",
"retrieved_chunk": "import * as cheerio from 'cheerio';\nimport Crawler, { CrawlerRequestResponse } from 'crawler';\nimport { stderr } from 'node:process';\nimport resolveURL from '../utils/resolveURL.js';\n// import TurndownService from 'turndown';\n// const turndownService = new TurndownService();\ntype ProgressCallback = (linksFound: number, linksCrawled: number, currentUrl: string) => void;\ninterface Page {\n url: string;\n text: string;",
"score": 38.67380831181566
},
{
"filename": "src/lib/crawler.ts",
"retrieved_chunk": " const text = $(this.selector).text();\n // const text = turndownService.turndown(html || '');\n const page: Page = {\n url: res.request.uri.href,\n text,\n title,\n };\n if (text.length > this.textLengthMinimum) {\n this.pages.push(page);\n this.progressCallback(this.count + 1, this.pages.length, res.request.uri.href);",
"score": 23.227986027414637
},
{
"filename": "src/lib/crawler.ts",
"retrieved_chunk": " selector: string;\n progressCallback: ProgressCallback;\n crawler: Crawler;\n constructor(\n urls: string[],\n progressCallback: ProgressCallback,\n selector = 'body',\n limit = 20,\n textLengthMinimum = 200\n ) {",
"score": 20.227318996428533
},
{
"filename": "src/utils/resolveURL.ts",
"retrieved_chunk": " */\nexport default function resolve(from: string, to: string) {\n const resolvedUrl = new URL(to, new URL(from, 'resolve://'));\n if (resolvedUrl.protocol === 'resolve:') {\n // `from` is a relative URL.\n const { pathname, search, hash } = resolvedUrl;\n return pathname + search + hash;\n }\n return resolvedUrl.toString();\n}",
"score": 20.05425681386968
},
{
"filename": "src/lib/crawler.ts",
"retrieved_chunk": " this.urls = urls;\n this.selector = selector;\n this.limit = limit;\n this.textLengthMinimum = textLengthMinimum;\n this.progressCallback = progressCallback;\n this.count = 0;\n this.pages = [];\n this.crawler = new Crawler({\n maxConnections: 10,\n callback: this.handleRequest,",
"score": 17.91684317609159
}
] | typescript | new WebCrawler([URL], progressCallback, selector, maxPages, numberOfCharactersRequired); |
import chalk from 'chalk';
import { stdout as output } from 'node:process';
import { OpenAIEmbeddings } from 'langchain/embeddings/openai';
import { HNSWLib } from 'langchain/vectorstores/hnswlib';
import { JSONLoader } from 'langchain/document_loaders/fs/json';
import { TextLoader } from 'langchain/document_loaders/fs/text';
import { PDFLoader } from 'langchain/document_loaders/fs/pdf';
import { DocxLoader } from 'langchain/document_loaders/fs/docx';
import { EPubLoader } from 'langchain/document_loaders/fs/epub';
import { CSVLoader } from 'langchain/document_loaders/fs/csv';
import ora from 'ora';
import { MarkdownTextSplitter, RecursiveCharacterTextSplitter } from 'langchain/text_splitter';
import { Document } from 'langchain/document';
import path from 'path';
import { YoutubeTranscript } from 'youtube-transcript';
import getDirectoryListWithDetails from '../utils/getDirectoryListWithDetails.js';
import createDirectory from '../utils/createDirectory.js';
import { getConfig, getDefaultOraOptions, getProjectRoot, setCurrentVectorStoreDatabasePath } from '../config/index.js';
import getDirectoryFiles from '../utils/getDirectoryFiles.js';
import WebCrawler from './crawler.js';
const projectRootDir = getProjectRoot();
const defaultOraOptions = getDefaultOraOptions(output);
/**
* This function loads and splits a file based on its extension using different loaders and text
* splitters.
* @param {string} filePath - A string representing the path to the file that needs to be loaded and
* split into documents.
* @returns The function `loadAndSplitFile` returns a Promise that resolves to an array of `Document`
* objects, where each `Document` represents a split portion of the input file. The type of the
* `Document` object is `Document<Record<string, unknown>>`, which means it has a generic type
* parameter that is an object with string keys and unknown values.
*/
async function loadAndSplitFile(filePath: string): Promise<Document<Record<string, unknown>>[]> {
const fileExtension = path.extname(filePath);
let loader;
let documents: Document<Record<string, unknown>>[];
switch (fileExtension) {
case '.json':
loader = new JSONLoader(filePath);
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
case '.txt':
loader = new TextLoader(filePath);
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
case '.md':
loader = new TextLoader(filePath);
documents = await loader.loadAndSplit(new MarkdownTextSplitter());
break;
case '.pdf':
loader = new PDFLoader(filePath, { splitPages: false });
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
case '.docx':
loader = new DocxLoader(filePath);
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
case '.csv':
loader = new CSVLoader(filePath);
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
case '.epub':
loader = new EPubLoader(filePath, { splitChapters: false });
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
default:
throw new Error(`Unsupported file extension: ${fileExtension}`);
}
return documents;
}
/**
* This function loads or creates a vector store using HNSWLib and OpenAIEmbeddings.
* @returns The function `loadOrCreateVectorStore` returns a Promise that resolves to an instance of
* the `HNSWLib` class, which is a vector store used for storing and searching high-dimensional
* vectors.
*/
async function loadOrCreateVectorStore(): Promise<HNSWLib> {
let vectorStore: HNSWLib;
let spinner;
await createDirectory(getConfig().currentVectorStoreDatabasePath);
const dbDirectory = getConfig().currentVectorStoreDatabasePath;
try {
vectorStore = await HNSWLib.load(dbDirectory, new OpenAIEmbeddings({ maxConcurrency: 5 }));
} catch {
spinner = ora({
...defaultOraOptions,
text: chalk.blue(`Creating new Context Vector Store in the ${dbDirectory} directory`),
}).start();
const docsDirectory = path.join(projectRootDir, process.env.DOCS_DIR || 'docs');
const filesToAdd = await getDirectoryFiles(docsDirectory);
| const documents = await Promise.all(filesToAdd.map((filePath) => loadAndSplitFile(filePath))); |
const flattenedDocuments = documents.reduce((acc, val) => acc.concat(val), []);
vectorStore = await HNSWLib.fromDocuments(flattenedDocuments, new OpenAIEmbeddings({ maxConcurrency: 5 }));
await vectorStore.save(dbDirectory);
spinner.succeed();
}
return vectorStore;
}
// Eagerly initialise the context store at module load; the wrapper object lets
// later calls (see loadOrCreateEmptyVectorStore) swap in a different instance.
const contextVectorStore = await loadOrCreateVectorStore();
const contextWrapper = {
  contextInstance: contextVectorStore,
};
/**
* This function loads or creates a new empty Context Vector Store using HNSWLib and OpenAIEmbeddings.
* @returns a Promise that resolves to an instance of the HNSWLib class, which represents a
* hierarchical navigable small world graph used for nearest neighbor search. The instance is either
* loaded from an existing directory or created as a new empty Context Vector Store with specified
* parameters.
*/
/**
 * Loads an existing Context Vector Store from the given sub-directory of the
 * vector-store base directory, or creates a new, empty, unsaved one there.
 * Also switches the current vector-store database path to that sub-directory
 * and publishes the resulting instance through contextWrapper.
 * @param {string} subDirectory - Sub-directory (under VECTOR_STORE_BASE_DIR) for the store.
 * @returns A Promise resolving to the loaded or newly created HNSWLib store.
 */
async function loadOrCreateEmptyVectorStore(subDirectory: string): Promise<HNSWLib> {
  let vectorStore: HNSWLib;
  let spinner;
  const newContextVectorStorePath = path.join(projectRootDir, process.env.VECTOR_STORE_BASE_DIR || 'db', subDirectory);
  await createDirectory(newContextVectorStorePath);
  setCurrentVectorStoreDatabasePath(newContextVectorStorePath);
  const dbDirectory = getConfig().currentVectorStoreDatabasePath;
  try {
    vectorStore = await HNSWLib.load(dbDirectory, new OpenAIEmbeddings({ maxConcurrency: 5 }));
    output.write(chalk.blue(`Using Context Vector Store in the ${dbDirectory} directory\n`));
  } catch {
    // No saved index in the directory yet — start an empty in-memory store.
    spinner = ora({
      ...defaultOraOptions,
      text: chalk.blue(`Creating new empty Context Vector Store in the ${dbDirectory} directory`),
    }).start();
    vectorStore = new HNSWLib(new OpenAIEmbeddings({ maxConcurrency: 5 }), {
      space: 'cosine',
      numDimensions: 1536, // dimensionality of OpenAI embedding vectors
    });
    spinner.succeed();
    output.write(
      chalk.red.bold(
        `\nThe Context Vector Store is currently empty and unsaved, add context to it using \`/add-docs\`, \`/add-url\` or \`/add-youtube\``
      )
    );
  }
  contextWrapper.contextInstance = vectorStore;
  return vectorStore;
}
/**
 * Returns the Context Vector Store instance currently published in
 * contextWrapper (loadOrCreateEmptyVectorStore may have swapped it).
 */
async function getContextVectorStore() {
  return contextWrapper.contextInstance;
}
/**
* This function adds documents to a context vector store and saves them.
* @param {string[]} filePaths - The `filePaths` parameter is an array of strings representing the file
* paths of the documents that need to be added to the Context Vector Store.
* @returns nothing (`undefined`).
*/
/**
 * Loads and splits the given files (resolved against the docs directory), adds
 * the resulting documents to the current Context Vector Store, and saves it.
 * Failures are reported via the spinner/stdout rather than rethrown.
 * @param {string[]} filePaths - File paths, relative to the docs directory.
 * @returns nothing (`undefined`).
 */
async function addDocument(filePaths: string[]) {
  let spinner;
  const dbDirectory = getConfig().currentVectorStoreDatabasePath;
  try {
    spinner = ora({ ...defaultOraOptions, text: `Adding files to the Context Vector Store` }).start();
    const docsDirectory = path.join(projectRootDir, process.env.DOCS_DIR || 'docs');
    const documents = await Promise.all(
      filePaths.map((filePath) => loadAndSplitFile(path.join(docsDirectory, filePath)))
    );
    const flattenedDocuments = documents.flat();
    const vectorStore = await getContextVectorStore();
    await vectorStore.addDocuments(flattenedDocuments);
    await vectorStore.save(dbDirectory);
    spinner.succeed();
    return;
  } catch (error) {
    if (spinner) {
      spinner.fail(chalk.red(error));
    } else {
      output.write(chalk.red(error));
    }
  }
}
/**
* The function adds a YouTube video transcript to a Context Vector Store.
* @param {string} URLOrVideoID - The URLOrVideoID parameter is a string that represents either the URL
* or the video ID of a YouTube video.
* @returns Nothing is being returned explicitly in the code, but the function is expected to return
* undefined after completing its execution.
*/
/**
 * Fetches the transcript of a YouTube video, splits it into documents, and
 * stores them in the current Context Vector Store.
 * @param {string} URLOrVideoID - YouTube URL or bare video ID.
 */
async function addYouTube(URLOrVideoID: string) {
  let spinner;
  const dbDirectory = getConfig().currentVectorStoreDatabasePath;
  try {
    spinner = ora({
      ...defaultOraOptions,
      text: `Adding Video transcript from ${URLOrVideoID} to the Context Vector Store`,
    }).start();
    // Join all transcript fragments into one text, then chunk it.
    const transcriptParts = await YoutubeTranscript.fetchTranscript(URLOrVideoID);
    const fullText = transcriptParts.map((part) => part.text).join(' ');
    const videoDocs = await new RecursiveCharacterTextSplitter().splitDocuments([
      new Document({ pageContent: fullText }),
    ]);
    const vectorStore = await getContextVectorStore();
    await vectorStore.addDocuments(videoDocs);
    await vectorStore.save(dbDirectory);
    spinner.succeed();
  } catch (error) {
    // Report through the spinner when it was started, otherwise straight to stdout.
    if (spinner) {
      spinner.fail(chalk.red(error));
    } else {
      output.write(chalk.red(error));
    }
  }
}
/**
* The function crawls a given URL, extracts text from the pages, splits the text into documents,
* generates embeddings for the documents, and saves them to a vector store.
* @param {string} URL - The URL of the website to crawl and extract text from.
* @param {string} selector - The selector parameter is a string that represents a CSS selector used to
* identify the HTML elements to be crawled on the web page. The WebCrawler will only crawl the
* elements that match the selector.
* @param {number} maxPages - The maximum number of pages to crawl for the given URL.
* @param {number} numberOfCharactersRequired - `numberOfCharactersRequired` is a number that specifies
* the minimum number of characters required for a document to be considered valid and used for
* generating embeddings. Any document with less than this number of characters will be discarded.
* @returns Nothing is being returned explicitly in the function, but it is implied that the function
* will return undefined if there are no errors.
*/
/**
 * Crawls a URL, splits the crawled page text into documents, embeds them, and
 * saves the result to the current Context Vector Store.
 * @param {string} URL - Start URL to crawl.
 * @param {string} selector - CSS selector restricting which elements are crawled.
 * @param {number} maxPages - Maximum number of pages to crawl.
 * @param {number} numberOfCharactersRequired - Minimum characters for a page to be kept.
 */
async function addURL(URL: string, selector: string, maxPages: number, numberOfCharactersRequired: number) {
  const dbDirectory = getConfig().currentVectorStoreDatabasePath;
  const addUrlSpinner = ora({ ...defaultOraOptions, text: `Crawling ${URL}` });
  let documents;
  try {
    addUrlSpinner.start();
    const progressCallback = (linksFound: number, linksCrawled: number, currentUrl: string) => {
      addUrlSpinner.text = `Links found: ${linksFound} - Links crawled: ${linksCrawled} - Crawling ${currentUrl}`;
    };
    const crawler = new WebCrawler([URL], progressCallback, selector, maxPages, numberOfCharactersRequired);
    const pages = (await crawler.start()) as Page[];
    // Split every crawled page into documents in parallel.
    const splitJobs = pages.map((page) =>
      new RecursiveCharacterTextSplitter().splitDocuments([new Document({ pageContent: page.text })])
    );
    documents = await Promise.all(splitJobs);
    addUrlSpinner.succeed();
  } catch (error) {
    addUrlSpinner.fail(chalk.red(error));
  }
  // Crawl failed — nothing to embed.
  if (!documents) return;
  const generateEmbeddingsSpinner = ora({ ...defaultOraOptions, text: `Generating Embeddings` });
  try {
    const flattenedDocuments = documents.flat();
    generateEmbeddingsSpinner.text = `Generating Embeddings for ${flattenedDocuments.length} documents`;
    generateEmbeddingsSpinner.start();
    const vectorStore = await getContextVectorStore();
    await vectorStore.addDocuments(flattenedDocuments);
    await vectorStore.save(dbDirectory);
    generateEmbeddingsSpinner.succeed();
  } catch (error) {
    generateEmbeddingsSpinner.fail(chalk.red(error));
  }
}
/**
 * Writes a listing of all Context Vector Store directories (and their files)
 * under the vector-store base directory, marking the currently selected one.
 */
async function listContextStores() {
  const projectRoot = getProjectRoot(); // base directory that VECTOR_STORE_BASE_DIR is resolved against
  const vectorStoreDir = process.env.VECTOR_STORE_BASE_DIR || 'db';
  const targetDir = path.join(projectRoot, vectorStoreDir);
  const contextVectorStoresList = await getDirectoryListWithDetails(targetDir);
  output.write(chalk.blue(`Context Vector Stores in ${targetDir}:\n\n`));
  Object.entries(contextVectorStoresList).forEach(([dir, files]) => {
    output.write(chalk.yellow(`Directory: ${dir}`));
    if (dir === getConfig().currentVectorStoreDatabasePath) {
      output.write(chalk.green(` (Currently selected)`));
    }
    output.write('\n');
    files.forEach((file) => {
      output.write(chalk.yellow(` File: ${file.name}, Size: ${file.size} KB\n`));
    });
  });
}
export { getContextVectorStore, addDocument, addURL, addYouTube, listContextStores, loadOrCreateEmptyVectorStore };
| src/lib/contextManager.ts | gmickel-memorybot-bad0302 | [
{
"filename": "src/lib/memoryManager.ts",
"retrieved_chunk": "const memoryDirectory = path.join(projectRootDir, process.env.MEMORY_VECTOR_STORE_DIR || 'memory');\nlet memoryVectorStore: HNSWLib;\ntry {\n memoryVectorStore = await HNSWLib.load(memoryDirectory, new OpenAIEmbeddings());\n} catch {\n output.write(`${chalk.blue(`Creating a new memory vector store index in the ${memoryDirectory} directory`)}\\n`);\n memoryVectorStore = new HNSWLib(new OpenAIEmbeddings(), {\n space: 'cosine',\n numDimensions: 1536,\n });",
"score": 48.61303727854711
},
{
"filename": "src/utils/getDirectoryFiles.ts",
"retrieved_chunk": "import path from 'path';\nimport fs from 'node:fs/promises';\nexport default async function getDirectoryFiles(directoryPath: string): Promise<string[]> {\n const fileNames = await fs.readdir(directoryPath);\n const filePathsPromises = fileNames.map(async (fileName) => {\n const filePath = path.join(directoryPath, fileName);\n const stat = await fs.stat(filePath);\n if (stat.isDirectory()) {\n const subDirectoryFiles = await getDirectoryFiles(filePath);\n return subDirectoryFiles;",
"score": 32.144930019277204
},
{
"filename": "src/lib/memoryManager.ts",
"retrieved_chunk": "}\nasync function deleteMemoryDirectory() {\n try {\n const files = await fs.readdir(memoryDirectory);\n const deletePromises = files.map((file) => fs.unlink(path.join(memoryDirectory, file)));\n await Promise.all(deletePromises);\n return `All files in the memory directory have been deleted.`;\n } catch (error) {\n if (error instanceof Error) {\n return chalk.red(`All files in the memory directory have been deleted: ${error.message}`);",
"score": 26.694696471791698
},
{
"filename": "src/utils/getDirectoryFiles.ts",
"retrieved_chunk": " }\n return filePath;\n });\n const filePathsArray = await Promise.all(filePathsPromises);\n const filePaths = filePathsArray.flat();\n return filePaths;\n}",
"score": 23.738041780616097
},
{
"filename": "src/lib/memoryManager.ts",
"retrieved_chunk": " }\n return chalk.red(`All files in the memory directory have been deleted: ${error}`);\n }\n}\nasync function resetMemoryVectorStore(onReset: (newMemoryVectorStore: HNSWLib) => void) {\n const newMemoryVectorStore = new HNSWLib(new OpenAIEmbeddings(), {\n space: 'cosine',\n numDimensions: 1536,\n });\n await deleteMemoryDirectory();",
"score": 23.513994200566486
}
] | typescript | const documents = await Promise.all(filesToAdd.map((filePath) => loadAndSplitFile(filePath))); |
import chalk from 'chalk';
import { stdout as output } from 'node:process';
import { OpenAIEmbeddings } from 'langchain/embeddings/openai';
import { HNSWLib } from 'langchain/vectorstores/hnswlib';
import { JSONLoader } from 'langchain/document_loaders/fs/json';
import { TextLoader } from 'langchain/document_loaders/fs/text';
import { PDFLoader } from 'langchain/document_loaders/fs/pdf';
import { DocxLoader } from 'langchain/document_loaders/fs/docx';
import { EPubLoader } from 'langchain/document_loaders/fs/epub';
import { CSVLoader } from 'langchain/document_loaders/fs/csv';
import ora from 'ora';
import { MarkdownTextSplitter, RecursiveCharacterTextSplitter } from 'langchain/text_splitter';
import { Document } from 'langchain/document';
import path from 'path';
import { YoutubeTranscript } from 'youtube-transcript';
import getDirectoryListWithDetails from '../utils/getDirectoryListWithDetails.js';
import createDirectory from '../utils/createDirectory.js';
import { getConfig, getDefaultOraOptions, getProjectRoot, setCurrentVectorStoreDatabasePath } from '../config/index.js';
import getDirectoryFiles from '../utils/getDirectoryFiles.js';
import WebCrawler from './crawler.js';
// Resolve the repository root and shared ora spinner options once per module load.
const projectRootDir = getProjectRoot();
const defaultOraOptions = getDefaultOraOptions(output);
/**
* This function loads and splits a file based on its extension using different loaders and text
* splitters.
* @param {string} filePath - A string representing the path to the file that needs to be loaded and
* split into documents.
* @returns The function `loadAndSplitFile` returns a Promise that resolves to an array of `Document`
* objects, where each `Document` represents a split portion of the input file. The type of the
* `Document` object is `Document<Record<string, unknown>>`, which means it has a generic type
* parameter that is an object with string keys and unknown values.
*/
async function loadAndSplitFile(filePath: string): Promise<Document<Record<string, unknown>>[]> {
const fileExtension = path.extname(filePath);
let loader;
let documents: Document<Record<string, unknown>>[];
switch (fileExtension) {
case '.json':
loader = new JSONLoader(filePath);
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
case '.txt':
loader = new TextLoader(filePath);
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
case '.md':
loader = new TextLoader(filePath);
documents = await loader.loadAndSplit(new MarkdownTextSplitter());
break;
case '.pdf':
loader = new PDFLoader(filePath, { splitPages: false });
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
case '.docx':
loader = new DocxLoader(filePath);
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
case '.csv':
loader = new CSVLoader(filePath);
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
case '.epub':
loader = new EPubLoader(filePath, { splitChapters: false });
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
default:
throw new Error(`Unsupported file extension: ${fileExtension}`);
}
return documents;
}
/**
* This function loads or creates a vector store using HNSWLib and OpenAIEmbeddings.
* @returns The function `loadOrCreateVectorStore` returns a Promise that resolves to an instance of
* the `HNSWLib` class, which is a vector store used for storing and searching high-dimensional
* vectors.
*/
async function loadOrCreateVectorStore(): Promise<HNSWLib> {
let vectorStore: HNSWLib;
let spinner;
await createDirectory( | getConfig().currentVectorStoreDatabasePath); |
const dbDirectory = getConfig().currentVectorStoreDatabasePath;
try {
vectorStore = await HNSWLib.load(dbDirectory, new OpenAIEmbeddings({ maxConcurrency: 5 }));
} catch {
spinner = ora({
...defaultOraOptions,
text: chalk.blue(`Creating new Context Vector Store in the ${dbDirectory} directory`),
}).start();
const docsDirectory = path.join(projectRootDir, process.env.DOCS_DIR || 'docs');
const filesToAdd = await getDirectoryFiles(docsDirectory);
const documents = await Promise.all(filesToAdd.map((filePath) => loadAndSplitFile(filePath)));
const flattenedDocuments = documents.reduce((acc, val) => acc.concat(val), []);
vectorStore = await HNSWLib.fromDocuments(flattenedDocuments, new OpenAIEmbeddings({ maxConcurrency: 5 }));
await vectorStore.save(dbDirectory);
spinner.succeed();
}
return vectorStore;
}
// Eagerly initialise the context store at module load; the wrapper object lets
// later calls (see loadOrCreateEmptyVectorStore) swap in a different instance.
const contextVectorStore = await loadOrCreateVectorStore();
const contextWrapper = {
  contextInstance: contextVectorStore,
};
/**
* This function loads or creates a new empty Context Vector Store using HNSWLib and OpenAIEmbeddings.
* @returns a Promise that resolves to an instance of the HNSWLib class, which represents a
* hierarchical navigable small world graph used for nearest neighbor search. The instance is either
* loaded from an existing directory or created as a new empty Context Vector Store with specified
* parameters.
*/
/**
 * Loads an existing Context Vector Store from the given sub-directory of the
 * vector-store base directory, or creates a new, empty, unsaved one there.
 * Also switches the current vector-store database path to that sub-directory
 * and publishes the resulting instance through contextWrapper.
 * @param {string} subDirectory - Sub-directory (under VECTOR_STORE_BASE_DIR) for the store.
 * @returns A Promise resolving to the loaded or newly created HNSWLib store.
 */
async function loadOrCreateEmptyVectorStore(subDirectory: string): Promise<HNSWLib> {
  let vectorStore: HNSWLib;
  let spinner;
  const newContextVectorStorePath = path.join(projectRootDir, process.env.VECTOR_STORE_BASE_DIR || 'db', subDirectory);
  await createDirectory(newContextVectorStorePath);
  setCurrentVectorStoreDatabasePath(newContextVectorStorePath);
  const dbDirectory = getConfig().currentVectorStoreDatabasePath;
  try {
    vectorStore = await HNSWLib.load(dbDirectory, new OpenAIEmbeddings({ maxConcurrency: 5 }));
    output.write(chalk.blue(`Using Context Vector Store in the ${dbDirectory} directory\n`));
  } catch {
    // No saved index in the directory yet — start an empty in-memory store.
    spinner = ora({
      ...defaultOraOptions,
      text: chalk.blue(`Creating new empty Context Vector Store in the ${dbDirectory} directory`),
    }).start();
    vectorStore = new HNSWLib(new OpenAIEmbeddings({ maxConcurrency: 5 }), {
      space: 'cosine',
      numDimensions: 1536, // dimensionality of OpenAI embedding vectors
    });
    spinner.succeed();
    output.write(
      chalk.red.bold(
        `\nThe Context Vector Store is currently empty and unsaved, add context to it using \`/add-docs\`, \`/add-url\` or \`/add-youtube\``
      )
    );
  }
  contextWrapper.contextInstance = vectorStore;
  return vectorStore;
}
/**
 * Returns the Context Vector Store instance currently published in
 * contextWrapper (loadOrCreateEmptyVectorStore may have swapped it).
 */
async function getContextVectorStore() {
  return contextWrapper.contextInstance;
}
/**
* This function adds documents to a context vector store and saves them.
* @param {string[]} filePaths - The `filePaths` parameter is an array of strings representing the file
* paths of the documents that need to be added to the Context Vector Store.
* @returns nothing (`undefined`).
*/
/**
 * Loads and splits the given files (resolved against the docs directory), adds
 * the resulting documents to the current Context Vector Store, and saves it.
 * Failures are reported via the spinner/stdout rather than rethrown.
 * @param {string[]} filePaths - File paths, relative to the docs directory.
 * @returns nothing (`undefined`).
 */
async function addDocument(filePaths: string[]) {
  let spinner;
  const dbDirectory = getConfig().currentVectorStoreDatabasePath;
  try {
    spinner = ora({ ...defaultOraOptions, text: `Adding files to the Context Vector Store` }).start();
    const docsDirectory = path.join(projectRootDir, process.env.DOCS_DIR || 'docs');
    const documents = await Promise.all(
      filePaths.map((filePath) => loadAndSplitFile(path.join(docsDirectory, filePath)))
    );
    const flattenedDocuments = documents.flat();
    const vectorStore = await getContextVectorStore();
    await vectorStore.addDocuments(flattenedDocuments);
    await vectorStore.save(dbDirectory);
    spinner.succeed();
    return;
  } catch (error) {
    if (spinner) {
      spinner.fail(chalk.red(error));
    } else {
      output.write(chalk.red(error));
    }
  }
}
/**
* The function adds a YouTube video transcript to a Context Vector Store.
* @param {string} URLOrVideoID - The URLOrVideoID parameter is a string that represents either the URL
* or the video ID of a YouTube video.
* @returns Nothing is being returned explicitly in the code, but the function is expected to return
* undefined after completing its execution.
*/
/**
 * Fetches the transcript of a YouTube video, splits it into documents, and
 * stores them in the current Context Vector Store.
 * @param {string} URLOrVideoID - YouTube URL or bare video ID.
 */
async function addYouTube(URLOrVideoID: string) {
  let spinner;
  const dbDirectory = getConfig().currentVectorStoreDatabasePath;
  try {
    spinner = ora({
      ...defaultOraOptions,
      text: `Adding Video transcript from ${URLOrVideoID} to the Context Vector Store`,
    }).start();
    // Join all transcript fragments into one text, then chunk it.
    const transcriptParts = await YoutubeTranscript.fetchTranscript(URLOrVideoID);
    const fullText = transcriptParts.map((part) => part.text).join(' ');
    const videoDocs = await new RecursiveCharacterTextSplitter().splitDocuments([
      new Document({ pageContent: fullText }),
    ]);
    const vectorStore = await getContextVectorStore();
    await vectorStore.addDocuments(videoDocs);
    await vectorStore.save(dbDirectory);
    spinner.succeed();
  } catch (error) {
    // Report through the spinner when it was started, otherwise straight to stdout.
    if (spinner) {
      spinner.fail(chalk.red(error));
    } else {
      output.write(chalk.red(error));
    }
  }
}
/**
* The function crawls a given URL, extracts text from the pages, splits the text into documents,
* generates embeddings for the documents, and saves them to a vector store.
* @param {string} URL - The URL of the website to crawl and extract text from.
* @param {string} selector - The selector parameter is a string that represents a CSS selector used to
* identify the HTML elements to be crawled on the web page. The WebCrawler will only crawl the
* elements that match the selector.
* @param {number} maxPages - The maximum number of pages to crawl for the given URL.
* @param {number} numberOfCharactersRequired - `numberOfCharactersRequired` is a number that specifies
* the minimum number of characters required for a document to be considered valid and used for
* generating embeddings. Any document with less than this number of characters will be discarded.
* @returns Nothing is being returned explicitly in the function, but it is implied that the function
* will return undefined if there are no errors.
*/
/**
 * Crawls a URL, splits the crawled page text into documents, embeds them, and
 * saves the result to the current Context Vector Store.
 * @param {string} URL - Start URL to crawl.
 * @param {string} selector - CSS selector restricting which elements are crawled.
 * @param {number} maxPages - Maximum number of pages to crawl.
 * @param {number} numberOfCharactersRequired - Minimum characters for a page to be kept.
 */
async function addURL(URL: string, selector: string, maxPages: number, numberOfCharactersRequired: number) {
  const dbDirectory = getConfig().currentVectorStoreDatabasePath;
  const addUrlSpinner = ora({ ...defaultOraOptions, text: `Crawling ${URL}` });
  let documents;
  try {
    addUrlSpinner.start();
    const progressCallback = (linksFound: number, linksCrawled: number, currentUrl: string) => {
      addUrlSpinner.text = `Links found: ${linksFound} - Links crawled: ${linksCrawled} - Crawling ${currentUrl}`;
    };
    const crawler = new WebCrawler([URL], progressCallback, selector, maxPages, numberOfCharactersRequired);
    const pages = (await crawler.start()) as Page[];
    // Split every crawled page into documents in parallel.
    const splitJobs = pages.map((page) =>
      new RecursiveCharacterTextSplitter().splitDocuments([new Document({ pageContent: page.text })])
    );
    documents = await Promise.all(splitJobs);
    addUrlSpinner.succeed();
  } catch (error) {
    addUrlSpinner.fail(chalk.red(error));
  }
  // Crawl failed — nothing to embed.
  if (!documents) return;
  const generateEmbeddingsSpinner = ora({ ...defaultOraOptions, text: `Generating Embeddings` });
  try {
    const flattenedDocuments = documents.flat();
    generateEmbeddingsSpinner.text = `Generating Embeddings for ${flattenedDocuments.length} documents`;
    generateEmbeddingsSpinner.start();
    const vectorStore = await getContextVectorStore();
    await vectorStore.addDocuments(flattenedDocuments);
    await vectorStore.save(dbDirectory);
    generateEmbeddingsSpinner.succeed();
  } catch (error) {
    generateEmbeddingsSpinner.fail(chalk.red(error));
  }
}
/**
 * Writes a listing of all Context Vector Store directories (and their files)
 * under the vector-store base directory, marking the currently selected one.
 */
async function listContextStores() {
  const projectRoot = getProjectRoot(); // base directory that VECTOR_STORE_BASE_DIR is resolved against
  const vectorStoreDir = process.env.VECTOR_STORE_BASE_DIR || 'db';
  const targetDir = path.join(projectRoot, vectorStoreDir);
  const contextVectorStoresList = await getDirectoryListWithDetails(targetDir);
  output.write(chalk.blue(`Context Vector Stores in ${targetDir}:\n\n`));
  Object.entries(contextVectorStoresList).forEach(([dir, files]) => {
    output.write(chalk.yellow(`Directory: ${dir}`));
    if (dir === getConfig().currentVectorStoreDatabasePath) {
      output.write(chalk.green(` (Currently selected)`));
    }
    output.write('\n');
    files.forEach((file) => {
      output.write(chalk.yellow(` File: ${file.name}, Size: ${file.size} KB\n`));
    });
  });
}
export { getContextVectorStore, addDocument, addURL, addYouTube, listContextStores, loadOrCreateEmptyVectorStore };
| src/lib/contextManager.ts | gmickel-memorybot-bad0302 | [
{
"filename": "src/lib/vectorStoreUtils.ts",
"retrieved_chunk": "import { HNSWLib } from 'langchain/vectorstores/hnswlib';\n/**\n * Retrieves relevant context for the given question by performing a similarity search on the provided vector store.\n * @param {HNSWLib} vectorStore - HNSWLib is a library for approximate nearest neighbor search, used to\n * search for similar vectors in a high-dimensional space.\n * @param {string} sanitizedQuestion - The sanitized version of the question that needs to be answered.\n * It is a string input.\n * @param {number} numDocuments - The `numDocuments` parameter is the number of documents that the\n * `getRelevantContext` function should retrieve from the `vectorStore` based on their similarity to\n * the `sanitizedQuestion`.",
"score": 51.81751590888221
},
{
"filename": "src/lib/vectorStoreUtils.ts",
"retrieved_chunk": " * @returns The function `getRelevantContext` is returning a Promise that resolves to a string. The\n * string is the concatenation of the `pageContent` property of the top `numDocuments` documents\n * returned by a similarity search performed on a `vectorStore` using the `sanitizedQuestion` as the\n * query. The resulting string is trimmed and all newline characters are replaced with spaces.\n */\nasync function getRelevantContext(\n vectorStore: HNSWLib,\n sanitizedQuestion: string,\n numDocuments: number\n): Promise<string> {",
"score": 49.96683933979949
},
{
"filename": "src/utils/resolveURL.ts",
"retrieved_chunk": "/**\n * The function resolves a URL from a given base URL and returns the resolved URL as a string.\n * @param {string} from - The `from` parameter is a string representing the base URL that the `to`\n * parameter will be resolved against. It can be an absolute or relative URL.\n * @param {string} to - The `to` parameter is a string representing the URL that needs to be resolved.\n * It can be an absolute URL or a relative URL.\n * @returns The function `resolve` returns a string that represents the resolved URL. If the `to`\n * parameter is a relative URL, the function returns a string that represents the resolved URL relative\n * to the `from` parameter. If the `to` parameter is an absolute URL, the function returns a string\n * that represents the resolved URL.",
"score": 39.81615780545772
},
{
"filename": "src/lib/memoryManager.ts",
"retrieved_chunk": "const memoryDirectory = path.join(projectRootDir, process.env.MEMORY_VECTOR_STORE_DIR || 'memory');\nlet memoryVectorStore: HNSWLib;\ntry {\n memoryVectorStore = await HNSWLib.load(memoryDirectory, new OpenAIEmbeddings());\n} catch {\n output.write(`${chalk.blue(`Creating a new memory vector store index in the ${memoryDirectory} directory`)}\\n`);\n memoryVectorStore = new HNSWLib(new OpenAIEmbeddings(), {\n space: 'cosine',\n numDimensions: 1536,\n });",
"score": 38.68883760244363
},
{
"filename": "src/commands/command.ts",
"retrieved_chunk": "/**\n * The function creates a command object with a name, aliases, description, and an execute function\n * that returns a Promise.\n * @param {string} name - A string representing the name of the command.\n * @param {string[]} aliases - An array of alternative names that can be used to call the command. For\n * example, if the command is named \"help\", aliases could include \"h\" or \"info\".\n * @param {string} description - A brief description of what the command does.\n * @param execute - The `execute` parameter is a function that takes in three arguments:\n * @returns A `Command` object is being returned.\n */",
"score": 38.165202652697936
}
] | typescript | getConfig().currentVectorStoreDatabasePath); |
import chalk from 'chalk';
import { stdout as output } from 'node:process';
import { OpenAIEmbeddings } from 'langchain/embeddings/openai';
import { HNSWLib } from 'langchain/vectorstores/hnswlib';
import { JSONLoader } from 'langchain/document_loaders/fs/json';
import { TextLoader } from 'langchain/document_loaders/fs/text';
import { PDFLoader } from 'langchain/document_loaders/fs/pdf';
import { DocxLoader } from 'langchain/document_loaders/fs/docx';
import { EPubLoader } from 'langchain/document_loaders/fs/epub';
import { CSVLoader } from 'langchain/document_loaders/fs/csv';
import ora from 'ora';
import { MarkdownTextSplitter, RecursiveCharacterTextSplitter } from 'langchain/text_splitter';
import { Document } from 'langchain/document';
import path from 'path';
import { YoutubeTranscript } from 'youtube-transcript';
import getDirectoryListWithDetails from '../utils/getDirectoryListWithDetails.js';
import createDirectory from '../utils/createDirectory.js';
import { getConfig, getDefaultOraOptions, getProjectRoot, setCurrentVectorStoreDatabasePath } from '../config/index.js';
import getDirectoryFiles from '../utils/getDirectoryFiles.js';
import WebCrawler from './crawler.js';
// Resolve the repository root and shared ora spinner options once per module load.
const projectRootDir = getProjectRoot();
const defaultOraOptions = getDefaultOraOptions(output);
/**
* This function loads and splits a file based on its extension using different loaders and text
* splitters.
* @param {string} filePath - A string representing the path to the file that needs to be loaded and
* split into documents.
* @returns The function `loadAndSplitFile` returns a Promise that resolves to an array of `Document`
* objects, where each `Document` represents a split portion of the input file. The type of the
* `Document` object is `Document<Record<string, unknown>>`, which means it has a generic type
* parameter that is an object with string keys and unknown values.
*/
async function loadAndSplitFile(filePath: string): Promise<Document<Record<string, unknown>>[]> {
const fileExtension = path.extname(filePath);
let loader;
let documents: Document<Record<string, unknown>>[];
switch (fileExtension) {
case '.json':
loader = new JSONLoader(filePath);
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
case '.txt':
loader = new TextLoader(filePath);
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
case '.md':
loader = new TextLoader(filePath);
documents = await loader.loadAndSplit(new MarkdownTextSplitter());
break;
case '.pdf':
loader = new PDFLoader(filePath, { splitPages: false });
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
case '.docx':
loader = new DocxLoader(filePath);
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
case '.csv':
loader = new CSVLoader(filePath);
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
case '.epub':
loader = new EPubLoader(filePath, { splitChapters: false });
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
default:
throw new Error(`Unsupported file extension: ${fileExtension}`);
}
return documents;
}
/**
* This function loads or creates a vector store using HNSWLib and OpenAIEmbeddings.
* @returns The function `loadOrCreateVectorStore` returns a Promise that resolves to an instance of
* the `HNSWLib` class, which is a vector store used for storing and searching high-dimensional
* vectors.
*/
async function loadOrCreateVectorStore(): Promise<HNSWLib> {
let vectorStore: HNSWLib;
let spinner;
await createDirectory(getConfig().currentVectorStoreDatabasePath);
const dbDirectory = getConfig().currentVectorStoreDatabasePath;
try {
vectorStore = await HNSWLib.load(dbDirectory, new OpenAIEmbeddings({ maxConcurrency: 5 }));
} catch {
spinner = ora({
...defaultOraOptions,
text: chalk.blue(`Creating new Context Vector Store in the ${dbDirectory} directory`),
}).start();
const docsDirectory = path.join(projectRootDir, process.env.DOCS_DIR || 'docs');
const filesToAdd = await getDirectoryFiles(docsDirectory);
const documents = await Promise.all(filesToAdd.map((filePath) => loadAndSplitFile(filePath)));
| const flattenedDocuments = documents.reduce((acc, val) => acc.concat(val), []); |
vectorStore = await HNSWLib.fromDocuments(flattenedDocuments, new OpenAIEmbeddings({ maxConcurrency: 5 }));
await vectorStore.save(dbDirectory);
spinner.succeed();
}
return vectorStore;
}
// Eagerly initialise the context store at module load; the wrapper object lets
// later calls (see loadOrCreateEmptyVectorStore) swap in a different instance.
const contextVectorStore = await loadOrCreateVectorStore();
const contextWrapper = {
  contextInstance: contextVectorStore,
};
/**
* This function loads or creates a new empty Context Vector Store using HNSWLib and OpenAIEmbeddings.
* @returns a Promise that resolves to an instance of the HNSWLib class, which represents a
* hierarchical navigable small world graph used for nearest neighbor search. The instance is either
* loaded from an existing directory or created as a new empty Context Vector Store with specified
* parameters.
*/
/**
 * Loads an existing Context Vector Store from the given sub-directory of the
 * vector-store base directory, or creates a new, empty, unsaved one there.
 * Also switches the current vector-store database path to that sub-directory
 * and publishes the resulting instance through contextWrapper.
 * @param {string} subDirectory - Sub-directory (under VECTOR_STORE_BASE_DIR) for the store.
 * @returns A Promise resolving to the loaded or newly created HNSWLib store.
 */
async function loadOrCreateEmptyVectorStore(subDirectory: string): Promise<HNSWLib> {
  let vectorStore: HNSWLib;
  let spinner;
  const newContextVectorStorePath = path.join(projectRootDir, process.env.VECTOR_STORE_BASE_DIR || 'db', subDirectory);
  await createDirectory(newContextVectorStorePath);
  setCurrentVectorStoreDatabasePath(newContextVectorStorePath);
  const dbDirectory = getConfig().currentVectorStoreDatabasePath;
  try {
    vectorStore = await HNSWLib.load(dbDirectory, new OpenAIEmbeddings({ maxConcurrency: 5 }));
    output.write(chalk.blue(`Using Context Vector Store in the ${dbDirectory} directory\n`));
  } catch {
    // No saved index in the directory yet — start an empty in-memory store.
    spinner = ora({
      ...defaultOraOptions,
      text: chalk.blue(`Creating new empty Context Vector Store in the ${dbDirectory} directory`),
    }).start();
    vectorStore = new HNSWLib(new OpenAIEmbeddings({ maxConcurrency: 5 }), {
      space: 'cosine',
      numDimensions: 1536, // dimensionality of OpenAI embedding vectors
    });
    spinner.succeed();
    output.write(
      chalk.red.bold(
        `\nThe Context Vector Store is currently empty and unsaved, add context to it using \`/add-docs\`, \`/add-url\` or \`/add-youtube\``
      )
    );
  }
  contextWrapper.contextInstance = vectorStore;
  return vectorStore;
}
/**
 * Returns the currently active Context Vector Store.
 * Reads through the mutable wrapper so an instance swapped in at runtime is picked up.
 */
async function getContextVectorStore() {
  const { contextInstance } = contextWrapper;
  return contextInstance;
}
/**
 * This function adds documents to a context vector store and saves them.
 * @param {string[]} filePaths - File paths, relative to the docs directory, of the
 * documents that need to be added to the Context Vector Store.
 * @returns nothing (`undefined`).
 */
async function addDocument(filePaths: string[]) {
  let spinner;
  const dbDirectory = getConfig().currentVectorStoreDatabasePath;
  try {
    spinner = ora({ ...defaultOraOptions, text: `Adding files to the Context Vector Store` }).start();
    const docsDirectory = path.join(projectRootDir, process.env.DOCS_DIR || 'docs');
    // Load and split every file in parallel; each entry is an array of Documents.
    const documents = await Promise.all(
      filePaths.map((filePath) => loadAndSplitFile(path.join(docsDirectory, filePath)))
    );
    // Array.prototype.flat() replaces reduce/concat — same result, clearer, and
    // consistent with how addURL flattens its documents.
    const flattenedDocuments = documents.flat();
    const vectorStore = await getContextVectorStore();
    await vectorStore.addDocuments(flattenedDocuments);
    await vectorStore.save(dbDirectory);
    spinner.succeed();
  } catch (error) {
    // Report the failure on the spinner when one was started; otherwise write directly.
    if (spinner) {
      spinner.fail(chalk.red(error));
    } else {
      output.write(chalk.red(error));
    }
  }
}
/**
 * Fetches the transcript of a YouTube video, splits it into documents, adds them to the
 * Context Vector Store, and persists the store.
 * @param {string} URLOrVideoID - Either the full URL or the video ID of a YouTube video.
 * @returns Resolves to undefined once the transcript has been added (or the error reported).
 */
async function addYouTube(URLOrVideoID: string) {
  let progressSpinner;
  const dbDirectory = getConfig().currentVectorStoreDatabasePath;
  try {
    progressSpinner = ora({
      ...defaultOraOptions,
      text: `Adding Video transcript from ${URLOrVideoID} to the Context Vector Store`,
    }).start();
    const transcriptParts = await YoutubeTranscript.fetchTranscript(URLOrVideoID);
    const fullText = transcriptParts.map((part) => part.text).join(' ');
    const videoDocs = await new RecursiveCharacterTextSplitter().splitDocuments([
      new Document({
        pageContent: fullText,
      }),
    ]);
    const vectorStore = await getContextVectorStore();
    await vectorStore.addDocuments(videoDocs);
    await vectorStore.save(dbDirectory);
    progressSpinner.succeed();
  } catch (error) {
    // Prefer failing the spinner so the error replaces the progress line.
    if (progressSpinner) {
      progressSpinner.fail(chalk.red(error));
    } else {
      output.write(chalk.red(error));
    }
  }
}
/**
 * The function crawls a given URL, extracts text from the pages, splits the text into documents,
 * generates embeddings for the documents, and saves them to a vector store.
 * @param {string} URL - The URL of the website to crawl and extract text from.
 * @param {string} selector - The selector parameter is a string that represents a CSS selector used to
 * identify the HTML elements to be crawled on the web page. The WebCrawler will only crawl the
 * elements that match the selector.
 * @param {number} maxPages - The maximum number of pages to crawl for the given URL.
 * @param {number} numberOfCharactersRequired - `numberOfCharactersRequired` is a number that specifies
 * the minimum number of characters required for a document to be considered valid and used for
 * generating embeddings. Any document with less than this number of characters will be discarded.
 * @returns Nothing is being returned explicitly in the function, but it is implied that the function
 * will return undefined if there are no errors.
 */
async function addURL(URL: string, selector: string, maxPages: number, numberOfCharactersRequired: number) {
  const dbDirectory = getConfig().currentVectorStoreDatabasePath;
  const addUrlSpinner = ora({ ...defaultOraOptions, text: `Crawling ${URL}` });
  let documents;
  try {
    addUrlSpinner.start();
    const progressCallback = (linksFound: number, linksCrawled: number, currentUrl: string) => {
      addUrlSpinner.text = `Links found: ${linksFound} - Links crawled: ${linksCrawled} - Crawling ${currentUrl}`;
    };
    const crawler = new WebCrawler([URL], progressCallback, selector, maxPages, numberOfCharactersRequired);
    const pages = (await crawler.start()) as Page[];
    // The splitter is loop-invariant (splitDocuments does not mutate it),
    // so create it once instead of once per crawled page.
    const splitter = new RecursiveCharacterTextSplitter();
    documents = await Promise.all(
      pages.map((row) =>
        splitter.splitDocuments([
          new Document({
            pageContent: row.text,
          }),
        ])
      )
    );
    addUrlSpinner.succeed();
  } catch (error) {
    addUrlSpinner.fail(chalk.red(error));
  }
  // Only attempt embedding when crawling produced documents (i.e. no error above).
  if (documents) {
    const generateEmbeddingsSpinner = ora({ ...defaultOraOptions, text: `Generating Embeddings` });
    try {
      const flattenedDocuments = documents.flat();
      generateEmbeddingsSpinner.text = `Generating Embeddings for ${flattenedDocuments.length} documents`;
      generateEmbeddingsSpinner.start();
      const vectorStore = await getContextVectorStore();
      await vectorStore.addDocuments(flattenedDocuments);
      await vectorStore.save(dbDirectory);
      generateEmbeddingsSpinner.succeed();
      return;
    } catch (error) {
      generateEmbeddingsSpinner.fail(chalk.red(error));
    }
  }
}
async function listContextStores() {
const projectRoot = getProjectRoot(); // Please replace this with your actual function to get the project root
const vectorStoreDir = process.env.VECTOR_STORE_BASE_DIR || 'db';
const targetDir = path.join(projectRoot, vectorStoreDir);
const contextVectorStoresList = await getDirectoryListWithDetails(targetDir);
output.write(chalk.blue(`Context Vector Stores in ${targetDir}:\n\n`));
Object.entries(contextVectorStoresList).forEach(([dir, files]) => {
output.write(chalk.yellow(`Directory: ${dir}`));
if (dir === getConfig().currentVectorStoreDatabasePath) {
output.write(chalk.green(` (Currently selected)`));
}
output.write('\n');
files.forEach((file) => {
output.write(chalk.yellow(` File: ${file.name}, Size: ${file.size} KB\n`));
});
});
}
export { getContextVectorStore, addDocument, addURL, addYouTube, listContextStores, loadOrCreateEmptyVectorStore };
| src/lib/contextManager.ts | gmickel-memorybot-bad0302 | [
{
"filename": "src/lib/memoryManager.ts",
"retrieved_chunk": "const memoryDirectory = path.join(projectRootDir, process.env.MEMORY_VECTOR_STORE_DIR || 'memory');\nlet memoryVectorStore: HNSWLib;\ntry {\n memoryVectorStore = await HNSWLib.load(memoryDirectory, new OpenAIEmbeddings());\n} catch {\n output.write(`${chalk.blue(`Creating a new memory vector store index in the ${memoryDirectory} directory`)}\\n`);\n memoryVectorStore = new HNSWLib(new OpenAIEmbeddings(), {\n space: 'cosine',\n numDimensions: 1536,\n });",
"score": 46.94574434448665
},
{
"filename": "src/utils/getDirectoryFiles.ts",
"retrieved_chunk": "import path from 'path';\nimport fs from 'node:fs/promises';\nexport default async function getDirectoryFiles(directoryPath: string): Promise<string[]> {\n const fileNames = await fs.readdir(directoryPath);\n const filePathsPromises = fileNames.map(async (fileName) => {\n const filePath = path.join(directoryPath, fileName);\n const stat = await fs.stat(filePath);\n if (stat.isDirectory()) {\n const subDirectoryFiles = await getDirectoryFiles(filePath);\n return subDirectoryFiles;",
"score": 33.71277501128078
},
{
"filename": "src/lib/memoryManager.ts",
"retrieved_chunk": "}\nasync function deleteMemoryDirectory() {\n try {\n const files = await fs.readdir(memoryDirectory);\n const deletePromises = files.map((file) => fs.unlink(path.join(memoryDirectory, file)));\n await Promise.all(deletePromises);\n return `All files in the memory directory have been deleted.`;\n } catch (error) {\n if (error instanceof Error) {\n return chalk.red(`All files in the memory directory have been deleted: ${error.message}`);",
"score": 25.53919591674721
},
{
"filename": "src/utils/getDirectoryFiles.ts",
"retrieved_chunk": " }\n return filePath;\n });\n const filePathsArray = await Promise.all(filePathsPromises);\n const filePaths = filePathsArray.flat();\n return filePaths;\n}",
"score": 25.300521303442682
},
{
"filename": "src/lib/memoryManager.ts",
"retrieved_chunk": " }\n return chalk.red(`All files in the memory directory have been deleted: ${error}`);\n }\n}\nasync function resetMemoryVectorStore(onReset: (newMemoryVectorStore: HNSWLib) => void) {\n const newMemoryVectorStore = new HNSWLib(new OpenAIEmbeddings(), {\n space: 'cosine',\n numDimensions: 1536,\n });\n await deleteMemoryDirectory();",
"score": 24.449774468218685
}
] | typescript | const flattenedDocuments = documents.reduce((acc, val) => acc.concat(val), []); |
import * as cheerio from 'cheerio';
import Crawler, { CrawlerRequestResponse } from 'crawler';
import { stderr } from 'node:process';
import resolveURL from '../utils/resolveURL.js';
// import TurndownService from 'turndown';
// const turndownService = new TurndownService();
// Progress hook invoked while crawling; receives running counters and the current URL.
type ProgressCallback = (linksFound: number, linksCrawled: number, currentUrl: string) => void;
// A crawled page: its URL, the text extracted via the CSS selector, and the <title> contents.
interface Page {
  url: string;
  text: string;
  title: string;
}
/* The WebCrawler class is a TypeScript implementation of a web crawler that can extract text from web
pages and follow links to crawl more pages. */
class WebCrawler {
  pages: Page[]; // pages collected so far (only those passing the text-length filter)
  limit: number; // maximum number of links to queue for crawling
  urls: string[]; // seed URLs; also used to restrict which discovered links are followed
  count: number; // number of links queued so far (used against `limit`)
  textLengthMinimum: number; // pages with extracted text at or below this length are discarded
  selector: string; // CSS selector whose text content is extracted from each page
  progressCallback: ProgressCallback;
  crawler: Crawler;
  constructor(
    urls: string[],
    progressCallback: ProgressCallback,
    selector = 'body',
    limit = 20,
    textLengthMinimum = 200
  ) {
    this.urls = urls;
    this.selector = selector;
    this.limit = limit;
    this.textLengthMinimum = textLengthMinimum;
    this.progressCallback = progressCallback;
    this.count = 0;
    this.pages = [];
    // handleRequest is an arrow-function field, so `this` stays bound when Crawler invokes it.
    this.crawler = new Crawler({
      maxConnections: 10,
      callback: this.handleRequest,
      userAgent: 'node-crawler',
    });
  }
  /* `handleRequest` is a method that handles the response of a web page request made by the `crawler`
object. It takes in three parameters: `error`, `res`, and `done`. It extracts the page text via the
configured selector, records the page if it is long enough, queues further same-site links up to
`limit`, and must always call `done()` so Crawler can continue draining its queue. */
  handleRequest = (error: Error | null, res: CrawlerRequestResponse, done: () => void) => {
    if (error) {
      stderr.write(error.message);
      done();
      return;
    }
    const $ = cheerio.load(res.body);
    // Remove obviously superfluous elements
    $('script').remove();
    $('header').remove();
    $('nav').remove();
    $('style').remove();
    $('img').remove();
    $('svg').remove();
    const title = $('title').text() || '';
    const text = $(this.selector).text();
    // const text = turndownService.turndown(html || '');
    const page: Page = {
      url: res.request.uri.href,
      text,
      title,
    };
    // Drop near-empty pages; only sufficiently long extractions are kept.
    if (text.length > this.textLengthMinimum) {
      this.pages.push(page);
      // NOTE(review): `count` tracks links *queued*, `pages.length` tracks pages *kept*;
      // these are passed as (linksFound, linksCrawled) — confirm this matches the
      // callback consumer's expectations.
      this.progressCallback(this.count + 1, this.pages.length, res.request.uri.href);
    }
    $('a').each((_i: number, elem: cheerio.Element) => {
      if (this.count >= this.limit) {
        return false; // Stop iterating once the limit is reached
      }
      // Strip fragment identifiers so the same page is not queued once per anchor.
      const href = $(elem).attr('href')?.split('#')[0];
      const uri = res.request.uri.href;
      const url = href && resolveURL(uri, href);
      // crawl more — but only links that contain one of the seed URLs (same-site restriction)
      if (url && this.urls.some((u) => url.includes(u))) {
        this.crawler.queue(url);
        this.count += 1;
      }
      return true; // Continue iterating when the limit is not reached
    });
    done();
  };
  // Starts the crawl from the seed URLs; resolves with all collected pages once the
  // Crawler queue drains (i.e. every queued request has completed).
  start = async () => {
    this.pages = [];
    return new Promise((resolve) => {
      this.crawler.on('drain', () => {
        resolve(this.pages);
      });
      this.urls.forEach((url) => {
        this.crawler.queue(url);
      });
    });
  };
}
export default WebCrawler;
| src/lib/crawler.ts | gmickel-memorybot-bad0302 | [
{
"filename": "src/commands/addURLCommand.ts",
"retrieved_chunk": " To avoid this, you can try to target a specific selector such as \\`.main\\``,\n async (args, output) => {\n if (!args || args.length > 4) {\n output.write(\n chalk.red(\n 'Invalid number of arguments. Usage: /add-url `url` `selector to extract` `Maximum number of links to follow` `Ignore pages with less than n characters`\\n'\n )\n );\n return;\n }",
"score": 27.63039632857436
},
{
"filename": "src/lib/contextManager.ts",
"retrieved_chunk": " * @param {string} selector - The selector parameter is a string that represents a CSS selector used to\n * identify the HTML elements to be crawled on the web page. The WebCrawler will only crawl the\n * elements that match the selector.\n * @param {number} maxPages - The maximum number of pages to crawl for the given URL.\n * @param {number} numberOfCharactersRequired - `numberOfCharactersRequired` is a number that specifies\n * the minimum number of characters required for a document to be considered valid and used for\n * generating embeddings. Any document with less than this number of characters will be discarded.\n * @returns Nothing is being returned explicitly in the function, but it is implied that the function\n * will return undefined if there are no errors.\n */",
"score": 19.539367688077945
},
{
"filename": "src/utils/getDirectoryListWithDetails.ts",
"retrieved_chunk": " const res = path.resolve(directory, dirent.name);\n if (dirent.isDirectory()) {\n const subdirContents = await getDirectoryListWithDetails(res, newContents);\n Object.assign(newContents, subdirContents);\n } else if (dirent.isFile() && dirent.name !== '.gitignore') {\n const stats = await fs.stat(res);\n files.push({ name: dirent.name, size: Math.ceil(stats.size / 1024) });\n }\n });\n await Promise.all(actions);",
"score": 19.475867768718302
},
{
"filename": "src/lib/contextManager.ts",
"retrieved_chunk": " }\n}\nasync function listContextStores() {\n const projectRoot = getProjectRoot(); // Please replace this with your actual function to get the project root\n const vectorStoreDir = process.env.VECTOR_STORE_BASE_DIR || 'db';\n const targetDir = path.join(projectRoot, vectorStoreDir);\n const contextVectorStoresList = await getDirectoryListWithDetails(targetDir);\n output.write(chalk.blue(`Context Vector Stores in ${targetDir}:\\n\\n`));\n Object.entries(contextVectorStoresList).forEach(([dir, files]) => {\n output.write(chalk.yellow(`Directory: ${dir}`));",
"score": 19.268864727002637
},
{
"filename": "src/commands/addYouTubeCommand.ts",
"retrieved_chunk": " if (!args || args.length !== 1) {\n output.write(chalk.red('Invalid number of arguments. Usage: /add-url `youtube url` or `youtube videoid`\\n'));\n return;\n }\n const URLOrVideoID = args[0];\n await addYouTube(URLOrVideoID);\n }\n);\nexport default addYouTubeCommand;",
"score": 18.020036151135272
}
] | typescript | url = href && resolveURL(uri, href); |
/* eslint-disable no-await-in-loop */
import dotenv from 'dotenv';
import { OpenAIChat } from 'langchain/llms/openai';
// eslint-disable-next-line import/no-unresolved
import * as readline from 'node:readline/promises';
import path from 'path';
import fs from 'fs';
/* This line of code is importing the `stdin` and `stdout` streams from the `process` module in
Node.js. These streams are used for reading input from the user and writing output to the console,
respectively. */
import { stdin as input, stdout as output } from 'node:process';
import { CallbackManager } from 'langchain/callbacks';
import { ChatPromptTemplate, HumanMessagePromptTemplate, SystemMessagePromptTemplate } from 'langchain/prompts';
import { LLMChain } from 'langchain/chains';
import { oneLine } from 'common-tags';
import chalk from 'chalk';
import logChat from './chatLogger.js';
import createCommandHandler from './commands.js';
import { getMemoryVectorStore, addDocumentsToMemoryVectorStore, getBufferWindowMemory } from './lib/memoryManager.js';
import { getContextVectorStore } from './lib/contextManager.js';
import { getRelevantContext } from './lib/vectorStoreUtils.js';
import sanitizeInput from './utils/sanitizeInput.js';
import { getConfig, getProjectRoot } from './config/index.js';
const projectRootDir = getProjectRoot();
// Load environment variables from .env before reading process.env below.
dotenv.config();
// Set up the chat log directory
const chatLogDirectory = path.join(projectRootDir, 'chat_logs');
// Get the prompt template
const systemPromptTemplate = fs.readFileSync(path.join(projectRootDir, 'src/prompt.txt'), 'utf8');
// Set up the readline interface to read input from the user and write output to the console
const rl = readline.createInterface({ input, output });
// Set up CLI commands
const commandHandler: CommandHandler = createCommandHandler();
// Stream tokens to the console as the model produces them.
const callbackManager = CallbackManager.fromHandlers({
  // This function is called when the LLM generates a new token (i.e., a prediction for the next word)
  async handleLLMNewToken(token: string) {
    // Write the token to the output stream (i.e., the console)
    output.write(token);
  },
});
// Chat model; MODEL env var overrides the default gpt-3.5-turbo.
const llm = new OpenAIChat({
  streaming: true,
  callbackManager,
  modelName: process.env.MODEL || 'gpt-3.5-turbo',
});
// oneLine collapses the prompt file's newlines into a single-line system message.
const systemPrompt = SystemMessagePromptTemplate.fromTemplate(oneLine`
  ${systemPromptTemplate}
`);
const chatPrompt = ChatPromptTemplate.fromPromptMessages([
  systemPrompt,
  HumanMessagePromptTemplate.fromTemplate('QUESTION: """{input}"""'),
]);
const windowMemory = getBufferWindowMemory();
// The chain combines the prompt, short-term window memory and the streaming LLM.
const chain = new LLMChain({
  prompt: chatPrompt,
  memory: windowMemory,
  llm,
});
// eslint-disable-next-line no-constant-condition
while (true) {
output.write(chalk.green('\nStart chatting or type /help for a list of commands\n'));
const userInput = await rl.question('> ');
let response;
if (userInput.startsWith('/')) {
const [command, ...args] = userInput.slice(1).split(' ');
await commandHandler.execute(command, args, output);
} else {
const memoryVectorStore = await getMemoryVectorStore();
const contextVectorStore = await getContextVectorStore();
const question = sanitizeInput(userInput);
const config = getConfig();
| const context = await getRelevantContext(contextVectorStore, question, config.numContextDocumentsToRetrieve); |
const history = await getRelevantContext(memoryVectorStore, question, config.numMemoryDocumentsToRetrieve);
try {
response = await chain.call({
input: question,
context,
history,
immediate_history: config.useWindowMemory ? windowMemory : '',
});
if (response) {
await addDocumentsToMemoryVectorStore([
{ content: question, metadataType: 'question' },
{ content: response.text, metadataType: 'answer' },
]);
await logChat(chatLogDirectory, question, response.response);
}
} catch (error) {
if (error instanceof Error && error.message.includes('Cancel:')) {
// TODO: Handle cancel
} else if (error instanceof Error) {
output.write(chalk.red(error.message));
} else {
output.write(chalk.red(error));
}
}
}
output.write('\n');
}
| src/index.ts | gmickel-memorybot-bad0302 | [
{
"filename": "src/commands/setContextConfigCommand.ts",
"retrieved_chunk": " if (!args || args.length !== 1) {\n output.write(chalk.red('Invalid number of arguments. Usage: /context-config `number of documents`\\n'));\n return;\n }\n const numContextDocumentsToRetrieve = parseInt(args[0], 10);\n setNumContextDocumentsToRetrieve(numContextDocumentsToRetrieve);\n const config = getConfig();\n output.write(chalk.blue(`Number of context documents to retrieve set to ${config.numContextDocumentsToRetrieve}`));\n }\n);",
"score": 33.17879584336979
},
{
"filename": "src/commands.ts",
"retrieved_chunk": " getCommands,\n async execute(commandName: string, args: string[], output: NodeJS.WriteStream) {\n const command = commands.find((cmd) => cmd.name === commandName || cmd.aliases.includes(commandName));\n if (command) {\n await command.execute(args, output, commandHandler);\n } else {\n output.write(chalk.red('Unknown command. Type /help to see the list of available commands.\\n'));\n }\n },\n };",
"score": 29.590925921345477
},
{
"filename": "src/commands/setMemoryConfigCommand.ts",
"retrieved_chunk": " if (!args || args.length !== 1) {\n output.write(chalk.red('Invalid number of arguments. Usage: /memory-config `number of documents`\\n'));\n return;\n }\n const numMemoryDocumentsToRetrieve = parseInt(args[0], 10);\n setNumMemoryDocumentsToRetrieve(numMemoryDocumentsToRetrieve);\n const config = getConfig();\n output.write(chalk.blue(`Number of memory documents to retrieve set to ${config.numMemoryDocumentsToRetrieve}`));\n }\n);",
"score": 25.662486547403486
},
{
"filename": "src/commands/setContextConfigCommand.ts",
"retrieved_chunk": "import chalk from 'chalk';\nimport createCommand from './command.js';\nimport { setNumContextDocumentsToRetrieve, getConfig } from '../config/index.js';\nconst setContextConfigCommand = createCommand(\n 'context-config',\n ['cc'],\n `Sets the number of relevant documents to return from the context vector store.\\n\n Arguments: \\`number of documents\\` (Default: 6)\\n\n Example: \\`/context-config 10\\``,\n async (args, output) => {",
"score": 23.10302206564482
},
{
"filename": "src/commands/switchContextStoreCommand.ts",
"retrieved_chunk": " if (!args || args.length !== 1) {\n output.write(chalk.red('Invalid number of arguments. Usage: /change-context-store `subdirectory`\\n'));\n return;\n }\n const subDirectory = args[0];\n await loadOrCreateEmptyVectorStore(subDirectory);\n }\n);\nexport default changeContextStoreCommand;",
"score": 22.462481831630377
}
] | typescript | const context = await getRelevantContext(contextVectorStore, question, config.numContextDocumentsToRetrieve); |
/* eslint-disable no-await-in-loop */
import dotenv from 'dotenv';
import { OpenAIChat } from 'langchain/llms/openai';
// eslint-disable-next-line import/no-unresolved
import * as readline from 'node:readline/promises';
import path from 'path';
import fs from 'fs';
/* This line of code is importing the `stdin` and `stdout` streams from the `process` module in
Node.js. These streams are used for reading input from the user and writing output to the console,
respectively. */
import { stdin as input, stdout as output } from 'node:process';
import { CallbackManager } from 'langchain/callbacks';
import { ChatPromptTemplate, HumanMessagePromptTemplate, SystemMessagePromptTemplate } from 'langchain/prompts';
import { LLMChain } from 'langchain/chains';
import { oneLine } from 'common-tags';
import chalk from 'chalk';
import logChat from './chatLogger.js';
import createCommandHandler from './commands.js';
import { getMemoryVectorStore, addDocumentsToMemoryVectorStore, getBufferWindowMemory } from './lib/memoryManager.js';
import { getContextVectorStore } from './lib/contextManager.js';
import { getRelevantContext } from './lib/vectorStoreUtils.js';
import sanitizeInput from './utils/sanitizeInput.js';
import { getConfig, getProjectRoot } from './config/index.js';
const projectRootDir = getProjectRoot();
dotenv.config();
// Set up the chat log directory
const chatLogDirectory = path.join(projectRootDir, 'chat_logs');
// Get the prompt template
const systemPromptTemplate = fs.readFileSync(path.join(projectRootDir, 'src/prompt.txt'), 'utf8');
// Set up the readline interface to read input from the user and write output to the console
const rl = readline.createInterface({ input, output });
// Set up CLI commands
const commandHandler: CommandHandler = createCommandHandler();
const callbackManager = CallbackManager.fromHandlers({
// This function is called when the LLM generates a new token (i.e., a prediction for the next word)
async handleLLMNewToken(token: string) {
// Write the token to the output stream (i.e., the console)
output.write(token);
},
});
const llm = new OpenAIChat({
streaming: true,
callbackManager,
modelName: process.env.MODEL || 'gpt-3.5-turbo',
});
const systemPrompt = SystemMessagePromptTemplate.fromTemplate(oneLine`
${systemPromptTemplate}
`);
const chatPrompt = ChatPromptTemplate.fromPromptMessages([
systemPrompt,
HumanMessagePromptTemplate.fromTemplate('QUESTION: """{input}"""'),
]);
const windowMemory = getBufferWindowMemory();
const chain = new LLMChain({
prompt: chatPrompt,
memory: windowMemory,
llm,
});
// eslint-disable-next-line no-constant-condition
while (true) {
output.write(chalk.green('\nStart chatting or type /help for a list of commands\n'));
const userInput = await rl.question('> ');
let response;
if (userInput.startsWith('/')) {
const [command, ...args] = userInput.slice(1).split(' ');
await commandHandler.execute(command, args, output);
} else {
const memoryVectorStore = await getMemoryVectorStore();
const contextVectorStore = await getContextVectorStore();
const question = sanitizeInput(userInput);
const config = getConfig();
const context = await getRelevantContext(contextVectorStore, question, config.numContextDocumentsToRetrieve);
const history = await getRelevantContext(memoryVectorStore, question, config.numMemoryDocumentsToRetrieve);
try {
response = await chain.call({
input: question,
context,
history,
immediate_history: config.useWindowMemory ? windowMemory : '',
});
if (response) {
await addDocumentsToMemoryVectorStore([
{ content: question, metadataType: 'question' },
{ content: response.text, metadataType: 'answer' },
]);
| await logChat(chatLogDirectory, question, response.response); |
}
} catch (error) {
if (error instanceof Error && error.message.includes('Cancel:')) {
// TODO: Handle cancel
} else if (error instanceof Error) {
output.write(chalk.red(error.message));
} else {
output.write(chalk.red(error));
}
}
}
output.write('\n');
}
| src/index.ts | gmickel-memorybot-bad0302 | [
{
"filename": "src/lib/memoryManager.ts",
"retrieved_chunk": " documents: Array<{ content: string; metadataType: string }>\n): Promise<void> {\n const formattedDocuments = documents.map(\n (doc) => new Document({ pageContent: doc.content, metadata: { type: doc.metadataType } })\n );\n await memoryWrapper.vectorStoreInstance.addDocuments(formattedDocuments);\n await saveMemoryVectorStore();\n}\nfunction resetBufferWindowMemory() {\n bufferWindowMemory.clear();",
"score": 26.12784570219595
},
{
"filename": "src/chatLogger.ts",
"retrieved_chunk": "const getLogFilename = (): string => {\n const currentDate = new Date();\n const year = currentDate.getFullYear();\n const month = String(currentDate.getMonth() + 1).padStart(2, '0');\n const day = String(currentDate.getDate()).padStart(2, '0');\n return `${year}-${month}-${day}.json`;\n};\nconst logChat = async (logDirectory: string, question: string, answer: string): Promise<void> => {\n const timestamp = new Date().toISOString();\n const chatHistory: ChatHistory = { timestamp, question, answer };",
"score": 20.768118191078635
},
{
"filename": "src/lib/crawler.ts",
"retrieved_chunk": " userAgent: 'node-crawler',\n });\n }\n /* `handleRequest` is a method that handles the response of a web page request made by the `crawler`\nobject. It takes in three parameters: `error`, `res`, and `done`. */\n handleRequest = (error: Error | null, res: CrawlerRequestResponse, done: () => void) => {\n if (error) {\n stderr.write(error.message);\n done();\n return;",
"score": 16.950446248300423
},
{
"filename": "src/chatLogger.ts",
"retrieved_chunk": "import fs from 'fs-extra';\nimport path from 'path';\ninterface ChatHistory {\n timestamp: string;\n question: string;\n answer: string;\n}\nconst ensureLogDirectory = (logDirectory: string): void => {\n fs.ensureDirSync(logDirectory);\n};",
"score": 16.61447602860862
},
{
"filename": "src/lib/vectorStoreUtils.ts",
"retrieved_chunk": "import { HNSWLib } from 'langchain/vectorstores/hnswlib';\n/**\n * Retrieves relevant context for the given question by performing a similarity search on the provided vector store.\n * @param {HNSWLib} vectorStore - HNSWLib is a library for approximate nearest neighbor search, used to\n * search for similar vectors in a high-dimensional space.\n * @param {string} sanitizedQuestion - The sanitized version of the question that needs to be answered.\n * It is a string input.\n * @param {number} numDocuments - The `numDocuments` parameter is the number of documents that the\n * `getRelevantContext` function should retrieve from the `vectorStore` based on their similarity to\n * the `sanitizedQuestion`.",
"score": 11.478997167755683
}
] | typescript | await logChat(chatLogDirectory, question, response.response); |
import chalk from 'chalk';
import { stdout as output } from 'node:process';
import { OpenAIEmbeddings } from 'langchain/embeddings/openai';
import { HNSWLib } from 'langchain/vectorstores/hnswlib';
import { JSONLoader } from 'langchain/document_loaders/fs/json';
import { TextLoader } from 'langchain/document_loaders/fs/text';
import { PDFLoader } from 'langchain/document_loaders/fs/pdf';
import { DocxLoader } from 'langchain/document_loaders/fs/docx';
import { EPubLoader } from 'langchain/document_loaders/fs/epub';
import { CSVLoader } from 'langchain/document_loaders/fs/csv';
import ora from 'ora';
import { MarkdownTextSplitter, RecursiveCharacterTextSplitter } from 'langchain/text_splitter';
import { Document } from 'langchain/document';
import path from 'path';
import { YoutubeTranscript } from 'youtube-transcript';
import getDirectoryListWithDetails from '../utils/getDirectoryListWithDetails.js';
import createDirectory from '../utils/createDirectory.js';
import { getConfig, getDefaultOraOptions, getProjectRoot, setCurrentVectorStoreDatabasePath } from '../config/index.js';
import getDirectoryFiles from '../utils/getDirectoryFiles.js';
import WebCrawler from './crawler.js';
const projectRootDir = getProjectRoot();
const defaultOraOptions = getDefaultOraOptions(output);
/**
* This function loads and splits a file based on its extension using different loaders and text
* splitters.
* @param {string} filePath - A string representing the path to the file that needs to be loaded and
* split into documents.
* @returns The function `loadAndSplitFile` returns a Promise that resolves to an array of `Document`
* objects, where each `Document` represents a split portion of the input file. The type of the
* `Document` object is `Document<Record<string, unknown>>`, which means it has a generic type
* parameter that is an object with string keys and unknown values.
*/
async function loadAndSplitFile(filePath: string): Promise<Document<Record<string, unknown>>[]> {
const fileExtension = path.extname(filePath);
let loader;
let documents: Document<Record<string, unknown>>[];
switch (fileExtension) {
case '.json':
loader = new JSONLoader(filePath);
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
case '.txt':
loader = new TextLoader(filePath);
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
case '.md':
loader = new TextLoader(filePath);
documents = await loader.loadAndSplit(new MarkdownTextSplitter());
break;
case '.pdf':
loader = new PDFLoader(filePath, { splitPages: false });
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
case '.docx':
loader = new DocxLoader(filePath);
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
case '.csv':
loader = new CSVLoader(filePath);
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
case '.epub':
loader = new EPubLoader(filePath, { splitChapters: false });
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
default:
throw new Error(`Unsupported file extension: ${fileExtension}`);
}
return documents;
}
/**
* This function loads or creates a vector store using HNSWLib and OpenAIEmbeddings.
* @returns The function `loadOrCreateVectorStore` returns a Promise that resolves to an instance of
* the `HNSWLib` class, which is a vector store used for storing and searching high-dimensional
* vectors.
*/
async function loadOrCreateVectorStore(): Promise<HNSWLib> {
let vectorStore: HNSWLib;
let spinner;
await createDirectory(getConfig().currentVectorStoreDatabasePath);
const dbDirectory = getConfig().currentVectorStoreDatabasePath;
try {
vectorStore = await HNSWLib.load(dbDirectory, new OpenAIEmbeddings({ maxConcurrency: 5 }));
} catch {
spinner = ora({
...defaultOraOptions,
text: chalk.blue(`Creating new Context Vector Store in the ${dbDirectory} directory`),
}).start();
const docsDirectory = path.join(projectRootDir, process.env.DOCS_DIR || 'docs');
const filesToAdd = await getDirectoryFiles(docsDirectory);
const documents = await Promise.all(filesToAdd.map((filePath) => loadAndSplitFile(filePath)));
const flattenedDocuments = documents.reduce( | (acc, val) => acc.concat(val), []); |
vectorStore = await HNSWLib.fromDocuments(flattenedDocuments, new OpenAIEmbeddings({ maxConcurrency: 5 }));
await vectorStore.save(dbDirectory);
spinner.succeed();
}
return vectorStore;
}
// Eagerly load (or build) the store at module import time.
const contextVectorStore = await loadOrCreateVectorStore();
// Mutable wrapper so the active store instance can be swapped at runtime
// (e.g. when switching to another store directory) without re-importing.
const contextWrapper = {
  contextInstance: contextVectorStore,
};
/**
* This function loads or creates a new empty Context Vector Store using HNSWLib and OpenAIEmbeddings.
* @returns a Promise that resolves to an instance of the HNSWLib class, which represents a
* hierarchical navigable small world graph used for nearest neighbor search. The instance is either
* loaded from an existing directory or created as a new empty Context Vector Store with specified
* parameters.
*/
async function loadOrCreateEmptyVectorStore(subDirectory: string): Promise<HNSWLib> {
let vectorStore: HNSWLib;
let spinner;
const newContextVectorStorePath = path.join(projectRootDir, process.env.VECTOR_STORE_BASE_DIR || 'db', subDirectory);
await createDirectory(newContextVectorStorePath);
setCurrentVectorStoreDatabasePath(newContextVectorStorePath);
const dbDirectory = getConfig().currentVectorStoreDatabasePath;
try {
vectorStore = await HNSWLib.load(dbDirectory, new OpenAIEmbeddings({ maxConcurrency: 5 }));
output.write(chalk.blue(`Using Context Vector Store in the ${dbDirectory} directory\n`));
} catch {
spinner = ora({
...defaultOraOptions,
text: chalk.blue(`Creating new empty Context Vector Store in the ${dbDirectory} directory`),
}).start();
vectorStore = new HNSWLib(new OpenAIEmbeddings({ maxConcurrency: 5 }), {
space: 'cosine',
numDimensions: 1536,
});
spinner.succeed();
output.write(
chalk.red.bold(
`\nThe Context Vector Store is currently empty and unsaved, add context to is using \`/add-docs\`, \`/add-url\` or \`/add-youtube\``
)
);
}
contextWrapper.contextInstance = vectorStore;
return vectorStore;
}
async function getContextVectorStore() {
return contextWrapper.contextInstance;
}
/**
* This function adds documents to a context vector store and saves them.
* @param {string[]} filePaths - The `filePaths` parameter is an array of strings representing the file
* paths of the documents that need to be added to the Context Vector Store.
* @returns nothing (`undefined`).
*/
async function addDocument(filePaths: string[]) {
let spinner;
const dbDirectory = getConfig().currentVectorStoreDatabasePath;
try {
spinner = ora({ ...defaultOraOptions, text: `Adding files to the Context Vector Store` }).start();
const docsDirectory = path.join(projectRootDir, process.env.DOCS_DIR || 'docs');
const documents = await Promise.all(
filePaths.map((filePath) => loadAndSplitFile(path.join(docsDirectory, filePath)))
);
const flattenedDocuments = documents.reduce((acc, val) => acc.concat(val), []);
const vectorStore = await getContextVectorStore();
await vectorStore.addDocuments(flattenedDocuments);
await vectorStore.save(dbDirectory);
spinner.succeed();
return;
} catch (error) {
if (spinner) {
spinner.fail(chalk.red(error));
} else {
output.write(chalk.red(error));
}
}
}
/**
* The function adds a YouTube video transcript to a Context Vector Store.
* @param {string} URLOrVideoID - The URLOrVideoID parameter is a string that represents either the URL
* or the video ID of a YouTube video.
* @returns Nothing is being returned explicitly in the code, but the function is expected to return
* undefined after completing its execution.
*/
async function addYouTube(URLOrVideoID: string) {
let spinner;
const dbDirectory = getConfig().currentVectorStoreDatabasePath;
try {
spinner = ora({
...defaultOraOptions,
text: `Adding Video transcript from ${URLOrVideoID} to the Context Vector Store`,
}).start();
const transcript = await YoutubeTranscript.fetchTranscript(URLOrVideoID);
const text = transcript.map((part) => part.text).join(' ');
const splitter = new RecursiveCharacterTextSplitter();
const videoDocs = await splitter.splitDocuments([
new Document({
pageContent: text,
}),
]);
const vectorStore = await getContextVectorStore();
await vectorStore.addDocuments(videoDocs);
await vectorStore.save(dbDirectory);
spinner.succeed();
return;
} catch (error) {
if (spinner) {
spinner.fail(chalk.red(error));
} else {
output.write(chalk.red(error));
}
}
}
/**
* The function crawls a given URL, extracts text from the pages, splits the text into documents,
* generates embeddings for the documents, and saves them to a vector store.
* @param {string} URL - The URL of the website to crawl and extract text from.
* @param {string} selector - The selector parameter is a string that represents a CSS selector used to
* identify the HTML elements to be crawled on the web page. The WebCrawler will only crawl the
* elements that match the selector.
* @param {number} maxPages - The maximum number of pages to crawl for the given URL.
* @param {number} numberOfCharactersRequired - `numberOfCharactersRequired` is a number that specifies
* the minimum number of characters required for a document to be considered valid and used for
* generating embeddings. Any document with less than this number of characters will be discarded.
* @returns Nothing is being returned explicitly in the function, but it is implied that the function
* will return undefined if there are no errors.
*/
async function addURL(URL: string, selector: string, maxPages: number, numberOfCharactersRequired: number) {
const dbDirectory = getConfig().currentVectorStoreDatabasePath;
const addUrlSpinner = ora({ ...defaultOraOptions, text: `Crawling ${URL}` });
let documents;
try {
addUrlSpinner.start();
const progressCallback = (linksFound: number, linksCrawled: number, currentUrl: string) => {
addUrlSpinner.text = `Links found: ${linksFound} - Links crawled: ${linksCrawled} - Crawling ${currentUrl}`;
};
const crawler = new WebCrawler([URL], progressCallback, selector, maxPages, numberOfCharactersRequired);
const pages = (await crawler.start()) as Page[];
documents = await Promise.all(
pages.map((row) => {
const splitter = new RecursiveCharacterTextSplitter();
const webDocs = splitter.splitDocuments([
new Document({
pageContent: row.text,
}),
]);
return webDocs;
})
);
addUrlSpinner.succeed();
} catch (error) {
addUrlSpinner.fail(chalk.red(error));
}
if (documents) {
const generateEmbeddingsSpinner = ora({ ...defaultOraOptions, text: `Generating Embeddings` });
try {
const flattenedDocuments = documents.flat();
generateEmbeddingsSpinner.text = `Generating Embeddings for ${flattenedDocuments.length} documents`;
generateEmbeddingsSpinner.start();
const vectorStore = await getContextVectorStore();
await vectorStore.addDocuments(flattenedDocuments);
await vectorStore.save(dbDirectory);
generateEmbeddingsSpinner.succeed();
return;
} catch (error) {
generateEmbeddingsSpinner.fail(chalk.red(error));
}
}
}
async function listContextStores() {
const projectRoot = getProjectRoot(); // Please replace this with your actual function to get the project root
const vectorStoreDir = process.env.VECTOR_STORE_BASE_DIR || 'db';
const targetDir = path.join(projectRoot, vectorStoreDir);
const contextVectorStoresList = await getDirectoryListWithDetails(targetDir);
output.write(chalk.blue(`Context Vector Stores in ${targetDir}:\n\n`));
Object.entries(contextVectorStoresList).forEach(([dir, files]) => {
output.write(chalk.yellow(`Directory: ${dir}`));
if (dir === getConfig().currentVectorStoreDatabasePath) {
output.write(chalk.green(` (Currently selected)`));
}
output.write('\n');
files.forEach((file) => {
output.write(chalk.yellow(` File: ${file.name}, Size: ${file.size} KB\n`));
});
});
}
export { getContextVectorStore, addDocument, addURL, addYouTube, listContextStores, loadOrCreateEmptyVectorStore };
| src/lib/contextManager.ts | gmickel-memorybot-bad0302 | [
{
"filename": "src/utils/getDirectoryFiles.ts",
"retrieved_chunk": "import path from 'path';\nimport fs from 'node:fs/promises';\nexport default async function getDirectoryFiles(directoryPath: string): Promise<string[]> {\n const fileNames = await fs.readdir(directoryPath);\n const filePathsPromises = fileNames.map(async (fileName) => {\n const filePath = path.join(directoryPath, fileName);\n const stat = await fs.stat(filePath);\n if (stat.isDirectory()) {\n const subDirectoryFiles = await getDirectoryFiles(filePath);\n return subDirectoryFiles;",
"score": 32.35743548915352
},
{
"filename": "src/lib/memoryManager.ts",
"retrieved_chunk": "const memoryDirectory = path.join(projectRootDir, process.env.MEMORY_VECTOR_STORE_DIR || 'memory');\nlet memoryVectorStore: HNSWLib;\ntry {\n memoryVectorStore = await HNSWLib.load(memoryDirectory, new OpenAIEmbeddings());\n} catch {\n output.write(`${chalk.blue(`Creating a new memory vector store index in the ${memoryDirectory} directory`)}\\n`);\n memoryVectorStore = new HNSWLib(new OpenAIEmbeddings(), {\n space: 'cosine',\n numDimensions: 1536,\n });",
"score": 32.26360754676673
},
{
"filename": "src/lib/memoryManager.ts",
"retrieved_chunk": "}\nasync function deleteMemoryDirectory() {\n try {\n const files = await fs.readdir(memoryDirectory);\n const deletePromises = files.map((file) => fs.unlink(path.join(memoryDirectory, file)));\n await Promise.all(deletePromises);\n return `All files in the memory directory have been deleted.`;\n } catch (error) {\n if (error instanceof Error) {\n return chalk.red(`All files in the memory directory have been deleted: ${error.message}`);",
"score": 24.424358503446868
},
{
"filename": "src/utils/getDirectoryFiles.ts",
"retrieved_chunk": " }\n return filePath;\n });\n const filePathsArray = await Promise.all(filePathsPromises);\n const filePaths = filePathsArray.flat();\n return filePaths;\n}",
"score": 24.051170989200063
},
{
"filename": "src/updateReadme.ts",
"retrieved_chunk": "import fs from 'fs';\nimport path from 'path';\nimport { getProjectRoot } from './config/index.js';\nconst projectRootDir = getProjectRoot();\nconst commandsDir = path.join(projectRootDir, 'src', 'commands');\nconst readmePath = path.join(projectRootDir, 'README.md');\nconst commandFiles = fs.readdirSync(commandsDir).filter((file) => file !== 'command.ts');\nasync function getCommandsMarkdown() {\n const commandsPromises = commandFiles.map(async (file) => {\n const commandModule = await import(path.join(commandsDir, file));",
"score": 20.030346509478033
}
] | typescript | (acc, val) => acc.concat(val), []); |
import { COMMENT_PRE, commentPreReg } from '../constants'
import { parsePlatform } from './parsePlatform'
export function parseComment(code: string) {
if (code.trim().length === 0)
return
const commentResults = [...code.matchAll(commentPreReg)]
if (commentResults.length === 0)
return
const commentAST = []
for (let i = 0; i < commentResults.length; i++) {
const item = commentResults[i]
const index = item.index!
const [self, commentPre, _space, prefix, _platform] = item
if (!COMMENT_PRE.includes(commentPre))
continue
const platform = _platform.trim()
if (platform && prefix !== '#endif') {
const prefixStart = self.indexOf(prefix) + index
const prefixEnd = prefixStart + prefix.length
commentAST.push({
start: prefixStart,
end: prefixEnd,
type: 'prefix',
row: prefix,
})
const platforms = parsePlatform(platform, commentPre)
if (!platforms)
continue
if (platforms.length > 1) {
const orRex = /\|\|/g
const orResult = [...platform.matchAll(orRex)]
const offset = index + self.indexOf(_platform) + 1
orResult.forEach((element) => {
const orStart = offset + element.index!
const orEnd = orStart + 2
commentAST.push({
start: orStart,
end: orEnd,
type: 'prefix',
row: element[0],
})
})
}
platforms.forEach( | (element) => { |
const platformStart = self.indexOf(element) + index
const platformEnd = platformStart + element.length
commentAST.push({
start: platformStart,
end: platformEnd,
type: 'platform',
row: element,
})
})
}
else {
const start = self.indexOf(prefix) + index
const end = start + prefix.length
commentAST.push({
start,
end,
row: prefix,
type: 'prefix',
})
}
}
return commentAST
}
| src/parseComment/index.ts | uni-helper-uni-highlight-vscode-f9002ae | [
{
"filename": "src/getPlatformInfo.ts",
"retrieved_chunk": " const { start, end, type, row } = item\n const color = HIGHTLIGHT_COLOR.platform[row as Platform]\n if (type === 'prefix') {\n platformInfos.push({\n start,\n end,\n type,\n })\n }\n else if (type === 'platform' && color) {",
"score": 10.167352231242702
},
{
"filename": "src/transformPlatform.ts",
"retrieved_chunk": " const { start, end, row, color } = platformInfo\n const range = new Range(\n editor.document.positionAt(start),\n editor.document.positionAt(end),\n )\n if (platformInfo.type === 'prefix')\n highlightRange.prefix.push(range)\n if (platformInfo.type === 'platform') {\n if (!highlightRange.platform[color])\n highlightRange.platform[color] = []",
"score": 8.56515892069207
},
{
"filename": "src/getPlatformInfo.ts",
"retrieved_chunk": " platformInfos.push({\n start,\n end,\n type,\n color,\n })\n }\n else if (type === 'platform' && !color) {\n platformInfos.push({\n start,",
"score": 8.120658508385791
},
{
"filename": "src/getPlatformInfo.ts",
"retrieved_chunk": " start: number\n end: number\n type: 'prefix' | 'platform' | 'unPlatform'\n color: string\n}",
"score": 7.267614519306599
},
{
"filename": "src/getPlatformInfo.ts",
"retrieved_chunk": "import type { Platform } from './constants'\nimport { HIGHTLIGHT_COLOR } from './constants'\nimport { parseComment } from './parseComment'\nexport function getPlatformInfo(code: string): PlatformInfo[] {\n const commentAST = parseComment(code)\n if (!commentAST)\n return []\n const platformInfos = []\n for (let i = 0; i < commentAST.length; i++) {\n const item = commentAST[i]",
"score": 6.756868283758099
}
] | typescript | (element) => { |
import chalk from 'chalk';
import { stdout as output } from 'node:process';
import { OpenAIEmbeddings } from 'langchain/embeddings/openai';
import { HNSWLib } from 'langchain/vectorstores/hnswlib';
import { JSONLoader } from 'langchain/document_loaders/fs/json';
import { TextLoader } from 'langchain/document_loaders/fs/text';
import { PDFLoader } from 'langchain/document_loaders/fs/pdf';
import { DocxLoader } from 'langchain/document_loaders/fs/docx';
import { EPubLoader } from 'langchain/document_loaders/fs/epub';
import { CSVLoader } from 'langchain/document_loaders/fs/csv';
import ora from 'ora';
import { MarkdownTextSplitter, RecursiveCharacterTextSplitter } from 'langchain/text_splitter';
import { Document } from 'langchain/document';
import path from 'path';
import { YoutubeTranscript } from 'youtube-transcript';
import getDirectoryListWithDetails from '../utils/getDirectoryListWithDetails.js';
import createDirectory from '../utils/createDirectory.js';
import { getConfig, getDefaultOraOptions, getProjectRoot, setCurrentVectorStoreDatabasePath } from '../config/index.js';
import getDirectoryFiles from '../utils/getDirectoryFiles.js';
import WebCrawler from './crawler.js';
const projectRootDir = getProjectRoot();
const defaultOraOptions = getDefaultOraOptions(output);
/**
* This function loads and splits a file based on its extension using different loaders and text
* splitters.
* @param {string} filePath - A string representing the path to the file that needs to be loaded and
* split into documents.
* @returns The function `loadAndSplitFile` returns a Promise that resolves to an array of `Document`
* objects, where each `Document` represents a split portion of the input file. The type of the
* `Document` object is `Document<Record<string, unknown>>`, which means it has a generic type
* parameter that is an object with string keys and unknown values.
*/
async function loadAndSplitFile(filePath: string): Promise<Document<Record<string, unknown>>[]> {
const fileExtension = path.extname(filePath);
let loader;
let documents: Document<Record<string, unknown>>[];
switch (fileExtension) {
case '.json':
loader = new JSONLoader(filePath);
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
case '.txt':
loader = new TextLoader(filePath);
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
case '.md':
loader = new TextLoader(filePath);
documents = await loader.loadAndSplit(new MarkdownTextSplitter());
break;
case '.pdf':
loader = new PDFLoader(filePath, { splitPages: false });
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
case '.docx':
loader = new DocxLoader(filePath);
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
case '.csv':
loader = new CSVLoader(filePath);
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
case '.epub':
loader = new EPubLoader(filePath, { splitChapters: false });
documents = await loader.loadAndSplit(new RecursiveCharacterTextSplitter());
break;
default:
throw new Error(`Unsupported file extension: ${fileExtension}`);
}
return documents;
}
/**
* This function loads or creates a vector store using HNSWLib and OpenAIEmbeddings.
* @returns The function `loadOrCreateVectorStore` returns a Promise that resolves to an instance of
* the `HNSWLib` class, which is a vector store used for storing and searching high-dimensional
* vectors.
*/
async function loadOrCreateVectorStore(): Promise<HNSWLib> {
let vectorStore: HNSWLib;
let spinner;
await createDirectory(getConfig().currentVectorStoreDatabasePath);
const dbDirectory = getConfig().currentVectorStoreDatabasePath;
try {
vectorStore = await HNSWLib.load(dbDirectory, new OpenAIEmbeddings({ maxConcurrency: 5 }));
} catch {
spinner = ora({
...defaultOraOptions,
text: chalk.blue(`Creating new Context Vector Store in the ${dbDirectory} directory`),
}).start();
const docsDirectory = path.join(projectRootDir, process.env.DOCS_DIR || 'docs');
const filesToAdd = await getDirectoryFiles(docsDirectory);
const documents = await Promise.all(filesToAdd.map((filePath) => loadAndSplitFile(filePath)));
const flattenedDocuments = documents.reduce((acc, val) => acc.concat(val), []);
vectorStore = await HNSWLib.fromDocuments(flattenedDocuments, new OpenAIEmbeddings({ maxConcurrency: 5 }));
await vectorStore.save(dbDirectory);
spinner.succeed();
}
return vectorStore;
}
const contextVectorStore = await loadOrCreateVectorStore();
const contextWrapper = {
contextInstance: contextVectorStore,
};
/**
* This function loads or creates a new empty Context Vector Store using HNSWLib and OpenAIEmbeddings.
* @returns a Promise that resolves to an instance of the HNSWLib class, which represents a
* hierarchical navigable small world graph used for nearest neighbor search. The instance is either
* loaded from an existing directory or created as a new empty Context Vector Store with specified
* parameters.
*/
async function loadOrCreateEmptyVectorStore(subDirectory: string): Promise<HNSWLib> {
let vectorStore: HNSWLib;
let spinner;
const newContextVectorStorePath = path.join(projectRootDir, process.env.VECTOR_STORE_BASE_DIR || 'db', subDirectory);
await createDirectory(newContextVectorStorePath);
setCurrentVectorStoreDatabasePath(newContextVectorStorePath);
const dbDirectory = getConfig().currentVectorStoreDatabasePath;
try {
vectorStore = await HNSWLib.load(dbDirectory, new OpenAIEmbeddings({ maxConcurrency: 5 }));
output.write(chalk.blue(`Using Context Vector Store in the ${dbDirectory} directory\n`));
} catch {
spinner = ora({
...defaultOraOptions,
text: chalk.blue(`Creating new empty Context Vector Store in the ${dbDirectory} directory`),
}).start();
vectorStore = new HNSWLib(new OpenAIEmbeddings({ maxConcurrency: 5 }), {
space: 'cosine',
numDimensions: 1536,
});
spinner.succeed();
output.write(
chalk.red.bold(
`\nThe Context Vector Store is currently empty and unsaved, add context to is using \`/add-docs\`, \`/add-url\` or \`/add-youtube\``
)
);
}
contextWrapper.contextInstance = vectorStore;
return vectorStore;
}
async function getContextVectorStore() {
return contextWrapper.contextInstance;
}
/**
* This function adds documents to a context vector store and saves them.
* @param {string[]} filePaths - The `filePaths` parameter is an array of strings representing the file
* paths of the documents that need to be added to the Context Vector Store.
* @returns nothing (`undefined`).
*/
async function addDocument(filePaths: string[]) {
let spinner;
const dbDirectory = getConfig().currentVectorStoreDatabasePath;
try {
spinner = ora({ ...defaultOraOptions, text: `Adding files to the Context Vector Store` }).start();
const docsDirectory = path.join(projectRootDir, process.env.DOCS_DIR || 'docs');
const documents = await Promise.all(
filePaths.map((filePath) => loadAndSplitFile(path.join(docsDirectory, filePath)))
);
const flattenedDocuments = documents.reduce((acc, val) => acc.concat(val), []);
const vectorStore = await getContextVectorStore();
await vectorStore.addDocuments(flattenedDocuments);
await vectorStore.save(dbDirectory);
spinner.succeed();
return;
} catch (error) {
if (spinner) {
spinner.fail(chalk.red(error));
} else {
output.write(chalk.red(error));
}
}
}
/**
* The function adds a YouTube video transcript to a Context Vector Store.
* @param {string} URLOrVideoID - The URLOrVideoID parameter is a string that represents either the URL
* or the video ID of a YouTube video.
* @returns Nothing is being returned explicitly in the code, but the function is expected to return
* undefined after completing its execution.
*/
async function addYouTube(URLOrVideoID: string) {
let spinner;
const dbDirectory = getConfig().currentVectorStoreDatabasePath;
try {
spinner = ora({
...defaultOraOptions,
text: `Adding Video transcript from ${URLOrVideoID} to the Context Vector Store`,
}).start();
const transcript = await YoutubeTranscript.fetchTranscript(URLOrVideoID);
const text = transcript.map((part) => part.text).join(' ');
const splitter = new RecursiveCharacterTextSplitter();
const videoDocs = await splitter.splitDocuments([
new Document({
pageContent: text,
}),
]);
const vectorStore = await getContextVectorStore();
await vectorStore.addDocuments(videoDocs);
await vectorStore.save(dbDirectory);
spinner.succeed();
return;
} catch (error) {
if (spinner) {
spinner.fail(chalk.red(error));
} else {
output.write(chalk.red(error));
}
}
}
/**
* The function crawls a given URL, extracts text from the pages, splits the text into documents,
* generates embeddings for the documents, and saves them to a vector store.
* @param {string} URL - The URL of the website to crawl and extract text from.
* @param {string} selector - The selector parameter is a string that represents a CSS selector used to
* identify the HTML elements to be crawled on the web page. The WebCrawler will only crawl the
* elements that match the selector.
* @param {number} maxPages - The maximum number of pages to crawl for the given URL.
* @param {number} numberOfCharactersRequired - `numberOfCharactersRequired` is a number that specifies
* the minimum number of characters required for a document to be considered valid and used for
* generating embeddings. Any document with less than this number of characters will be discarded.
* @returns Nothing is being returned explicitly in the function, but it is implied that the function
* will return undefined if there are no errors.
*/
async function addURL(URL: string, selector: string, maxPages: number, numberOfCharactersRequired: number) {
const dbDirectory = getConfig().currentVectorStoreDatabasePath;
const addUrlSpinner = ora({ ...defaultOraOptions, text: `Crawling ${URL}` });
let documents;
try {
addUrlSpinner.start();
const progressCallback = (linksFound: number, linksCrawled: number, currentUrl: string) => {
addUrlSpinner.text = `Links found: ${linksFound} - Links crawled: ${linksCrawled} - Crawling ${currentUrl}`;
};
| const crawler = new WebCrawler([URL], progressCallback, selector, maxPages, numberOfCharactersRequired); |
const pages = (await crawler.start()) as Page[];
documents = await Promise.all(
pages.map((row) => {
const splitter = new RecursiveCharacterTextSplitter();
const webDocs = splitter.splitDocuments([
new Document({
pageContent: row.text,
}),
]);
return webDocs;
})
);
addUrlSpinner.succeed();
} catch (error) {
addUrlSpinner.fail(chalk.red(error));
}
if (documents) {
const generateEmbeddingsSpinner = ora({ ...defaultOraOptions, text: `Generating Embeddings` });
try {
const flattenedDocuments = documents.flat();
generateEmbeddingsSpinner.text = `Generating Embeddings for ${flattenedDocuments.length} documents`;
generateEmbeddingsSpinner.start();
const vectorStore = await getContextVectorStore();
await vectorStore.addDocuments(flattenedDocuments);
await vectorStore.save(dbDirectory);
generateEmbeddingsSpinner.succeed();
return;
} catch (error) {
generateEmbeddingsSpinner.fail(chalk.red(error));
}
}
}
async function listContextStores() {
const projectRoot = getProjectRoot(); // Please replace this with your actual function to get the project root
const vectorStoreDir = process.env.VECTOR_STORE_BASE_DIR || 'db';
const targetDir = path.join(projectRoot, vectorStoreDir);
const contextVectorStoresList = await getDirectoryListWithDetails(targetDir);
output.write(chalk.blue(`Context Vector Stores in ${targetDir}:\n\n`));
Object.entries(contextVectorStoresList).forEach(([dir, files]) => {
output.write(chalk.yellow(`Directory: ${dir}`));
if (dir === getConfig().currentVectorStoreDatabasePath) {
output.write(chalk.green(` (Currently selected)`));
}
output.write('\n');
files.forEach((file) => {
output.write(chalk.yellow(` File: ${file.name}, Size: ${file.size} KB\n`));
});
});
}
export { getContextVectorStore, addDocument, addURL, addYouTube, listContextStores, loadOrCreateEmptyVectorStore };
| src/lib/contextManager.ts | gmickel-memorybot-bad0302 | [
{
"filename": "src/lib/crawler.ts",
"retrieved_chunk": "import * as cheerio from 'cheerio';\nimport Crawler, { CrawlerRequestResponse } from 'crawler';\nimport { stderr } from 'node:process';\nimport resolveURL from '../utils/resolveURL.js';\n// import TurndownService from 'turndown';\n// const turndownService = new TurndownService();\ntype ProgressCallback = (linksFound: number, linksCrawled: number, currentUrl: string) => void;\ninterface Page {\n url: string;\n text: string;",
"score": 45.12222065234612
},
{
"filename": "src/utils/resolveURL.ts",
"retrieved_chunk": " */\nexport default function resolve(from: string, to: string) {\n const resolvedUrl = new URL(to, new URL(from, 'resolve://'));\n if (resolvedUrl.protocol === 'resolve:') {\n // `from` is a relative URL.\n const { pathname, search, hash } = resolvedUrl;\n return pathname + search + hash;\n }\n return resolvedUrl.toString();\n}",
"score": 29.700729544191283
},
{
"filename": "src/lib/crawler.ts",
"retrieved_chunk": " selector: string;\n progressCallback: ProgressCallback;\n crawler: Crawler;\n constructor(\n urls: string[],\n progressCallback: ProgressCallback,\n selector = 'body',\n limit = 20,\n textLengthMinimum = 200\n ) {",
"score": 27.91461880451473
},
{
"filename": "src/utils/resolveURL.ts",
"retrieved_chunk": "/**\n * The function resolves a URL from a given base URL and returns the resolved URL as a string.\n * @param {string} from - The `from` parameter is a string representing the base URL that the `to`\n * parameter will be resolved against. It can be an absolute or relative URL.\n * @param {string} to - The `to` parameter is a string representing the URL that needs to be resolved.\n * It can be an absolute URL or a relative URL.\n * @returns The function `resolve` returns a string that represents the resolved URL. If the `to`\n * parameter is a relative URL, the function returns a string that represents the resolved URL relative\n * to the `from` parameter. If the `to` parameter is an absolute URL, the function returns a string\n * that represents the resolved URL.",
"score": 26.05223072159781
},
{
"filename": "src/lib/crawler.ts",
"retrieved_chunk": " const text = $(this.selector).text();\n // const text = turndownService.turndown(html || '');\n const page: Page = {\n url: res.request.uri.href,\n text,\n title,\n };\n if (text.length > this.textLengthMinimum) {\n this.pages.push(page);\n this.progressCallback(this.count + 1, this.pages.length, res.request.uri.href);",
"score": 25.65783960786886
}
] | typescript | const crawler = new WebCrawler([URL], progressCallback, selector, maxPages, numberOfCharactersRequired); |
import {isNoScenamatica} from "../utils.js"
import {deployPlugin} from "./deployer.js"
import {kill, onDataReceived} from "./client";
import type {ChildProcess} from "node:child_process";
import {spawn} from "node:child_process";
import type {Writable} from "node:stream";
import * as fs from "node:fs";
import path from "node:path";
import {info, setFailed, warning} from "@actions/core";
import {printFooter} from "../outputs/summary";
let serverProcess: ChildProcess | undefined
let serverStdin: Writable | undefined
const genArgs = (executable: string, args: string[]) => {
return [
...args,
"-jar",
executable,
"nogui"
]
}
const createServerProcess = (workDir: string, executable: string, args: string[] = []) => {
const cp = spawn(
"java",
genArgs(executable, args),
{
cwd: workDir
}
)
serverStdin = cp.stdin
serverProcess = cp
return cp
}
export const startServerOnly = async (workDir: string, executable: string, args: string[] = []) => {
info(`Starting server with executable ${executable} and args ${args.join(" ")}`)
const cp = createServerProcess(workDir, executable, args)
cp.stdout.on("data", (data: Buffer) => {
const line = data.toString("utf8")
if (line.includes("Done") && line.includes("For help, type \"help\""))
serverStdin?.write("stop\n")
if (line.endsWith("\n"))
info(line.slice(0, - 1))
else
info(line)
})
return new Promise<number>((resolve, reject) => {
cp.on("exit", (code) => {
if (code === 0)
resolve(code)
else
reject(code)
})
})
}
export const stopServer = () => {
if (!serverStdin || !serverProcess)
return
info("Stopping server...")
serverStdin.write("stop\n")
setTimeout(() => {
if (serverProcess!.killed)
return
warning("Server didn't stop in time, killing it...")
serverProcess?.kill("SIGKILL")
}, 1000 * 20)
}
export const startTests = async (serverDir: string, executable: string, pluginFile: string) => {
info(`Starting tests of plugin ${pluginFile}.`)
if (isNoScenamatica())
await removeScenamatica(serverDir)
await deployPlugin(serverDir, pluginFile)
const cp = createServerProcess(serverDir, executable)
cp.stdout.on("data", async (data: Buffer) => {
await | onDataReceived(data.toString("utf8"))
})
} |
const removeScenamatica = async (serverDir: string) => {
info("Removing Scenamatica from server...")
const pluginDir = path.join(serverDir, "plugins")
const files = await fs.promises.readdir(pluginDir)
for (const file of files) {
if (file.includes("Scenamatica") && file.endsWith(".jar")) {
info(`Removing ${file}...`)
await fs.promises.rm(path.join(pluginDir, file))
}
}
}
export const endTests = async (succeed: boolean) => {
info("Ending tests, shutting down server...")
kill()
stopServer()
await printFooter()
let code: number
if (succeed) {
info("Tests succeeded")
code = 0
} else {
setFailed("Tests failed")
code = 1
}
process.exit(code)
}
| src/server/controller.ts | TeamKun-scenamatica-action-6f66283 | [
{
"filename": "src/server/deployer.ts",
"retrieved_chunk": "export const deployPlugin = async (serverDir: string, pluginFile: string) => {\n const pluginDir = path.join(serverDir, \"plugins\")\n await io.mkdirP(pluginDir)\n await io.cp(pluginFile, pluginDir)\n}\nconst initScenamaticaConfig = async (configDir: string, scenamaticaVersion: string) => {\n const configPath = path.join(configDir, \"config.yml\")\n const configData = yaml.load(await fs.promises.readFile(configPath, \"utf8\")) as {\n interfaces?: {\n raw: boolean",
"score": 39.86661427783651
},
{
"filename": "src/main.ts",
"retrieved_chunk": " initPRMode(pullRequest, githubToken)\n }\n if (!fs.existsSync(pluginFile)) {\n setFailed(`Plugin file ${pluginFile} does not exist`)\n return\n }\n const paper = await deployServer(serverDir, javaVersion, mcVersion, scenamaticaVersion)\n info(\"Starting tests...\")\n await startTests(serverDir, paper, pluginFile)\n}",
"score": 37.68776777668632
},
{
"filename": "src/main.ts",
"retrieved_chunk": " const args: Args = getArguments()\n const { mcVersion,\n javaVersion,\n scenamaticaVersion,\n serverDir,\n pluginFile,\n githubToken\n } = args\n const pullRequest = context.payload.pull_request\n if (pullRequest) {",
"score": 21.857020774436116
},
{
"filename": "src/utils.ts",
"retrieved_chunk": "}\ninterface Args {\n mcVersion: string\n scenamaticaVersion: string\n serverDir: string\n pluginFile: string\n javaVersion: string\n githubToken: string\n failThreshold: number\n}",
"score": 19.11160164479143
},
{
"filename": "src/utils.ts",
"retrieved_chunk": "const getArguments = (): Args => {\n return {\n mcVersion: core.getInput(\"minecraft\") || \"1.16.5\",\n scenamaticaVersion: core.getInput(\"scenamatica\", ) || DEFAULT_SCENAMATICA_VERSION,\n serverDir: core.getInput(\"server-dir\") || \"server\",\n pluginFile: core.getInput(\"plugin\", { required: true }),\n javaVersion: core.getInput(\"java\") || \"17\",\n githubToken: core.getInput(\"github-token\") || process.env.GITHUB_TOKEN!,\n failThreshold: Number.parseInt(core.getInput(\"fail-threshold\"), 10) || 0,\n }",
"score": 15.608801257592189
}
] | typescript | onDataReceived(data.toString("utf8"))
})
} |
import {isNoScenamatica} from "../utils.js"
import {deployPlugin} from "./deployer.js"
import {kill, onDataReceived} from "./client";
import type {ChildProcess} from "node:child_process";
import {spawn} from "node:child_process";
import type {Writable} from "node:stream";
import * as fs from "node:fs";
import path from "node:path";
import {info, setFailed, warning} from "@actions/core";
import {printFooter} from "../outputs/summary";
// Handle of the currently running server process, if any.
let serverProcess: ChildProcess | undefined
// Stdin stream of the running server; used to send console commands such as "stop".
let serverStdin: Writable | undefined
/**
 * Builds the java command-line argument list: caller-supplied JVM/server
 * args first, followed by "-jar <executable> nogui".
 */
const genArgs = (executable: string, args: string[]) => [...args, "-jar", executable, "nogui"]
/**
 * Spawns the Minecraft server as a Java child process in the given working
 * directory and records its handle and stdin in the module-level state.
 */
const createServerProcess = (workDir: string, executable: string, args: string[] = []) => {
    const child = spawn("java", genArgs(executable, args), { cwd: workDir })
    serverProcess = child
    serverStdin = child.stdin
    return child
}
/**
 * Starts the server once (no plugin under test), waits for the vanilla
 * "Done ... For help" startup banner, then asks it to stop gracefully.
 *
 * @param workDir    working directory the server runs in
 * @param executable server jar to launch
 * @param args       extra JVM arguments, placed before "-jar"
 * @returns a Promise resolving with exit code 0 on success; rejects with an
 *          Error on spawn failure or a non-zero/signal exit.
 */
export const startServerOnly = async (workDir: string, executable: string, args: string[] = []) => {
    info(`Starting server with executable ${executable} and args ${args.join(" ")}`)
    const cp = createServerProcess(workDir, executable, args)
    cp.stdout.on("data", (data: Buffer) => {
        const line = data.toString("utf8")
        // Startup-complete marker printed by vanilla/Paper servers; once the
        // server is up we immediately request a graceful shutdown.
        if (line.includes("Done") && line.includes("For help, type \"help\""))
            serverStdin?.write("stop\n")
        // Strip the trailing newline (and a CR on CRLF output) so the action
        // log doesn't get doubled blank lines.
        info(line.replace(/\r?\n$/, ""))
    })
    return new Promise<number>((resolve, reject) => {
        // Without this, a failed spawn (e.g. java not on PATH) raises an
        // unhandled 'error' event and crashes the action.
        cp.on("error", reject)
        cp.on("exit", (code) => {
            if (code === 0)
                resolve(code)
            else
                // code is null when the process was terminated by a signal.
                reject(new Error(`Server exited abnormally (exit code: ${code ?? "signal"})`))
        })
    })
}
/**
 * Requests a graceful shutdown via the "stop" console command and SIGKILLs
 * the server if it is still running after 20 seconds.
 * No-op when no server is currently tracked.
 */
export const stopServer = () => {
    if (!serverStdin || !serverProcess)
        return
    info("Stopping server...")
    serverStdin.write("stop\n")
    const watchdog = setTimeout(() => {
        // `exitCode` is non-null once the process has actually terminated;
        // `killed` only records whether kill() was ever called, so checking it
        // alone would SIGKILL (and warn about) an already-stopped server.
        if (!serverProcess || serverProcess.exitCode !== null || serverProcess.killed)
            return
        warning("Server didn't stop in time, killing it...")
        serverProcess.kill("SIGKILL")
    }, 1000 * 20)
    // Don't let the watchdog timer keep the Node process alive.
    watchdog.unref()
}
/**
 * Deploys the plugin under test (optionally stripping Scenamatica first)
 * and boots the server, streaming its stdout to the Scenamatica client.
 */
export const startTests = async (serverDir: string, executable: string, pluginFile: string) => {
    info(`Starting tests of plugin ${pluginFile}.`)
    if (isNoScenamatica()) {
        await removeScenamatica(serverDir)
    }
    await deployPlugin(serverDir, pluginFile)
    const server = createServerProcess(serverDir, executable)
    server.stdout.on("data", async (chunk: Buffer) => {
        await onDataReceived(chunk.toString("utf8"))
    })
}
/**
 * Deletes every Scenamatica jar found in the server's plugins directory.
 */
const removeScenamatica = async (serverDir: string) => {
    info("Removing Scenamatica from server...")
    const pluginDir = path.join(serverDir, "plugins")
    const entries = await fs.promises.readdir(pluginDir)
    const targets = entries.filter((name) => name.includes("Scenamatica") && name.endsWith(".jar"))
    for (const name of targets) {
        info(`Removing ${name}...`)
        await fs.promises.rm(path.join(pluginDir, name))
    }
}
export const endTests = async (succeed: boolean) => {
info("Ending tests, shutting down server...")
kill()
stopServer()
await | printFooter()
let code: number
if (succeed) { |
info("Tests succeeded")
code = 0
} else {
setFailed("Tests failed")
code = 1
}
process.exit(code)
}
| src/server/controller.ts | TeamKun-scenamatica-action-6f66283 | [
{
"filename": "src/server/client.ts",
"retrieved_chunk": " incomingBuffer = messages.slice(1).join(\"\\n\") || undefined\n if (!await processPacket(messages[0]))\n info(messages[0])\n }\n}\nexport const kill = () => {\n alive = false\n}\nconst processPacket = async (msg: string) => {\n if (!alive) {",
"score": 13.508704125170052
},
{
"filename": "src/outputs/summary.ts",
"retrieved_chunk": "}\nconst printFooter = async () => {\n summary.addRaw(getFooter())\n await summary.write()\n}\nexport { printSummary, printErrorSummary, printFooter }",
"score": 13.169492442422106
},
{
"filename": "src/logging.ts",
"retrieved_chunk": " }\n}\nexport const logSessionStart = (startedAt: number, tests: number): void => {\n info(\"--------------------------------------\")\n info(\" T E S T S\")\n info(\"--------------------------------------\")\n info(`The session is started at ${startedAt}, ${tests} tests are marked to be run.`)\n}\nexport const logSessionEnd = (sessionEnd: PacketSessionEnd): void => {\n const elapsed = `${Math.ceil((sessionEnd.finishedAt - sessionEnd.startedAt) / 1000)} sec`",
"score": 9.964064810764292
},
{
"filename": "src/server/deployer.ts",
"retrieved_chunk": " // キャッシュの復元\n const cached = await restoreCache(dir, javaVersion, mcVersion, scenamaticaVersion)\n if (cached)\n return new Promise<string>((resolve) => {\n resolve(PAPER_NAME)\n })\n // キャッシュがないので Paper をビルドする。\n info(\"Building server...\")\n // Java のダウンロード\n if (!(await isJavaInstalled())) await downloadJava(dir, javaVersion)",
"score": 9.3631905753619
},
{
"filename": "src/outputs/messages.ts",
"retrieved_chunk": " ))\n return table\n}\nconst getSummaryHeader = (total: number, elapsed: number, passed: number, failures: number, skipped: number, cancelled: number) => {\n const threshold = getArguments().failThreshold\n let messageSource: string[]\n if (total === passed + skipped) messageSource = MESSAGES_PASSED\n else if (failures === 0) messageSource = MESSAGES_NO_TESTS\n else if (failures <= threshold) messageSource = MESSAGES_PASSED_WITH_THRESHOLD\n else messageSource = MESSAGES_FAILED",
"score": 8.793648505516519
}
] | typescript | printFooter()
let code: number
if (succeed) { |
import {isNoScenamatica} from "../utils.js"
import {deployPlugin} from "./deployer.js"
import {kill, onDataReceived} from "./client";
import type {ChildProcess} from "node:child_process";
import {spawn} from "node:child_process";
import type {Writable} from "node:stream";
import * as fs from "node:fs";
import path from "node:path";
import {info, setFailed, warning} from "@actions/core";
import {printFooter} from "../outputs/summary";
// Handle of the currently running server process, if any.
let serverProcess: ChildProcess | undefined
// Stdin stream of the running server; used to send console commands such as "stop".
let serverStdin: Writable | undefined
// Builds the java command line: user args first, then "-jar <executable> nogui".
const genArgs = (executable: string, args: string[]) => {
    return [
        ...args,
        "-jar",
        executable,
        "nogui"
    ]
}
// Spawns the server as a Java child process in workDir and records its
// handle and stdin in the module-level state above.
const createServerProcess = (workDir: string, executable: string, args: string[] = []) => {
    const cp = spawn(
        "java",
        genArgs(executable, args),
        {
            cwd: workDir
        }
    )
    serverStdin = cp.stdin
    serverProcess = cp
    return cp
}
/**
 * Starts the server without any plugin under test, waits for it to finish
 * booting (the vanilla "Done ... For help" banner), then asks it to stop.
 * Resolves with exit code 0; rejects with the raw non-zero exit code
 * (null when killed by a signal) otherwise.
 */
export const startServerOnly = async (workDir: string, executable: string, args: string[] = []) => {
    info(`Starting server with executable ${executable} and args ${args.join(" ")}`)
    const cp = createServerProcess(workDir, executable, args)
    cp.stdout.on("data", (data: Buffer) => {
        const line = data.toString("utf8")
        // Startup-complete marker printed by vanilla/Paper servers.
        if (line.includes("Done") && line.includes("For help, type \"help\""))
            serverStdin?.write("stop\n")
        // Strip the trailing newline so the action log doesn't double-space.
        if (line.endsWith("\n"))
            info(line.slice(0, - 1))
        else
            info(line)
    })
    return new Promise<number>((resolve, reject) => {
        cp.on("exit", (code) => {
            if (code === 0)
                resolve(code)
            else
                reject(code)
        })
    })
}
/**
 * Requests a graceful shutdown via the "stop" console command and SIGKILLs
 * the server if it still appears alive after 20 seconds.
 * No-op when no server is currently tracked.
 */
export const stopServer = () => {
    if (!serverStdin || !serverProcess)
        return
    info("Stopping server...")
    serverStdin.write("stop\n")
    setTimeout(() => {
        // NOTE(review): `killed` is only true after kill() was called; it does
        // not reflect a normal exit, so a gracefully-stopped server may still
        // be warned about and SIGKILLed here — confirm against exitCode.
        if (serverProcess!.killed)
            return
        warning("Server didn't stop in time, killing it...")
        serverProcess?.kill("SIGKILL")
    }, 1000 * 20)
}
/**
 * Deploys the plugin under test (removing the Scenamatica jar first when
 * the no-scenamatica flag is set) and starts the server, streaming its
 * stdout to the Scenamatica packet client.
 */
export const startTests = async (serverDir: string, executable: string, pluginFile: string) => {
    info(`Starting tests of plugin ${pluginFile}.`)
    if (isNoScenamatica())
        await removeScenamatica(serverDir)
    await deployPlugin(serverDir, pluginFile)
    const cp = createServerProcess(serverDir, executable)
    cp.stdout.on("data", async (data: Buffer) => {
        await onDataReceived(data.toString("utf8"))
    })
}
// Deletes every Scenamatica jar from the server's plugins directory.
const removeScenamatica = async (serverDir: string) => {
    info("Removing Scenamatica from server...")
    const pluginDir = path.join(serverDir, "plugins")
    const files = await fs.promises.readdir(pluginDir)
    for (const file of files) {
        if (file.includes("Scenamatica") && file.endsWith(".jar")) {
            info(`Removing ${file}...`)
            await fs.promises.rm(path.join(pluginDir, file))
        }
    }
}
/**
 * Finalizes the test session: stops the packet client and server, writes
 * the summary footer, then terminates the action with exit code 0
 * (success) or 1 (failure, also marking the workflow step failed).
 */
export const endTests = async (succeed: boolean) => {
    info("Ending tests, shutting down server...")
    kill()
    stopServer()
    await printFooter()
    let code: number
    if (succeed) {
        info("Tests succeeded")
        code = 0
    } else {
        setFailed("Tests failed")
        code = 1
    }
    process.exit(code)
}
| src/server/controller.ts | TeamKun-scenamatica-action-6f66283 | [
{
"filename": "src/server/client.ts",
"retrieved_chunk": " incomingBuffer = messages.slice(1).join(\"\\n\") || undefined\n if (!await processPacket(messages[0]))\n info(messages[0])\n }\n}\nexport const kill = () => {\n alive = false\n}\nconst processPacket = async (msg: string) => {\n if (!alive) {",
"score": 13.508704125170052
},
{
"filename": "src/outputs/summary.ts",
"retrieved_chunk": "}\nconst printFooter = async () => {\n summary.addRaw(getFooter())\n await summary.write()\n}\nexport { printSummary, printErrorSummary, printFooter }",
"score": 13.169492442422106
},
{
"filename": "src/logging.ts",
"retrieved_chunk": " }\n}\nexport const logSessionStart = (startedAt: number, tests: number): void => {\n info(\"--------------------------------------\")\n info(\" T E S T S\")\n info(\"--------------------------------------\")\n info(`The session is started at ${startedAt}, ${tests} tests are marked to be run.`)\n}\nexport const logSessionEnd = (sessionEnd: PacketSessionEnd): void => {\n const elapsed = `${Math.ceil((sessionEnd.finishedAt - sessionEnd.startedAt) / 1000)} sec`",
"score": 9.964064810764292
},
{
"filename": "src/server/deployer.ts",
"retrieved_chunk": " // キャッシュの復元\n const cached = await restoreCache(dir, javaVersion, mcVersion, scenamaticaVersion)\n if (cached)\n return new Promise<string>((resolve) => {\n resolve(PAPER_NAME)\n })\n // キャッシュがないので Paper をビルドする。\n info(\"Building server...\")\n // Java のダウンロード\n if (!(await isJavaInstalled())) await downloadJava(dir, javaVersion)",
"score": 9.3631905753619
},
{
"filename": "src/outputs/messages.ts",
"retrieved_chunk": " ))\n return table\n}\nconst getSummaryHeader = (total: number, elapsed: number, passed: number, failures: number, skipped: number, cancelled: number) => {\n const threshold = getArguments().failThreshold\n let messageSource: string[]\n if (total === passed + skipped) messageSource = MESSAGES_PASSED\n else if (failures === 0) messageSource = MESSAGES_NO_TESTS\n else if (failures <= threshold) messageSource = MESSAGES_PASSED_WITH_THRESHOLD\n else messageSource = MESSAGES_FAILED",
"score": 8.793648505516519
}
] | typescript | await printFooter()
let code: number
if (succeed) { |
import {extractTestResults, getArguments} from "../utils";
import type {PacketTestEnd} from "../packets";
import {getEmojiForCause} from "../logging";
// Headline pools for the session summary; getSummaryHeader picks one entry
// at random from the pool matching the overall outcome.

// Used when every test passed or was skipped (total === passed + skipped).
const MESSAGES_PASSED = [
  ":tada: Congrats! All tests passed! :star2:",
  ":raised_hands: High-five! You nailed all the tests! :tada::tada:",
  ":confetti_ball: Hooray! Everything's working perfectly! :tada::confetti_ball:",
  ":100: Perfect score! All tests passed with flying colors! :rainbow::clap:",
  ":thumbsup: Great job! All tests passed without a hitch! :rocket::star2:",
  ":metal: Rock on! All tests passed flawlessly! :guitar::metal:",
  ":partying_face: Celebrate good times! All tests passed with flying colors! :tada::confetti_ball::balloon:",
  ":muscle: You crushed it! All tests passed with ease! :fire::muscle:",
  ":1st_place_medal: Gold medal performance! All tests passed with flying colors! :1st_place_medal::star2:",
  ":champagne: Pop the champagne! All tests passed, time to celebrate! :champagne::tada:"
];
// Used when no test failed but not everything passed/skipped
// (see getSummaryHeader's failures === 0 branch).
const MESSAGES_NO_TESTS = [
  "Alright, who forgot to write tests? :face_with_raised_eyebrow:",
  "No tests? Time to break out the crystal ball. :crystal_ball:",
  "Tests? Who writes tests? :person_shrugging:",
  "No tests found. Did they run away? :man_running: :woman_running:",
  "No tests, no glory. :trophy:",
  "Tests? We don't need no stinkin' tests! :shushing_face:",
  "No tests? I guess we'll just have to wing it. :eagle:",
  "You get a test, and you get a test! Everybody gets a test! :gift: :tada:",
  "No tests? That's impossible! :dizzy_face:",
  "Tests make the code go round. :carousel_horse:"
];
// Used when failures exceed the configured fail threshold.
const MESSAGES_FAILED = [
  "Oops! Something went wrong! :scream_cat:",
  "Oh no! The tests have betrayed us! :scream:",
  "Houston, we have a problem. :rocket:",
  "Looks like we have some debugging to do. :beetle:",
  "Failures? More like opportunities to improve! :muscle:",
  "This is not the result we were looking for. :confused:",
  "Looks like we need to rethink our strategy. :thinking:",
  "Don't worry, we'll get 'em next time! :sunglasses:",
  "Keep calm and debug on. :female_detective:",
  "The only way is up from here! :rocket:"
];
// Used when some tests failed but the count stayed within failThreshold.
const MESSAGES_PASSED_WITH_THRESHOLD = [
  "Tests passed, but some are being rebellious. Debug mode: ON! :microscope:",
  "Almost there! Some tests failed, but hey, progress is progress! :turtle:",
  "Good news: most tests passed. Bad news: a few had different plans. Let's fix 'em! :hammer:",
  "We're on the right track, but some tests are playing hard to get. Challenge accepted! :muscle:",
  "Tests went well overall, but we have a few stubborn failures. Time for some gentle persuasion! :wrench:",
  "Success with a side of failures. It's like a bittersweet symphony. Let's sweeten it up! :musical_note:",
  "We're soaring high, but some tests got left behind. Time to reel them back in! :fishing_pole_and_fish:",
  "Great progress, but we've got some test gremlins causing trouble. Let's send them packing! :imp:",
  "Victory is ours, with a sprinkle of defeat. Let's conquer those pesky failures! :crossed_swords:",
  "We're almost there, but a few tests are being rebellious. Let's bring them back to the flock! :sheep:"
];
// Pre-filled GitHub "new issue" URL for bug reports (URL-encoded Japanese
// 【バグ】 title prefix).
const REPORT_URL = "https://github.com/TeamKun/Scenamatica/issues/new?assignees=PeyaPeyaPeyang&labels=Type%3A+Bug&projects=&template=bug_report.yml&title=%E3%80%90%E3%83%90%E3%82%B0%E3%80%91"
/**
 * Builds the HTML heading of the summary report. When isError is set, an
 * error banner and an "Error details" section header are appended.
 */
export const getHeader = (isError: boolean) => {
    const parts = [wrap("h1", "Scenamatica"), wrap("h2", "Summary"), "<hr />"]
    if (isError)
        parts.push(
            wrap("h4", ":no_entry: ERROR!!"),
            wrap("p", "An unexpected error occurred while running the server and Scenamatica daemon."),
            wrap("h2", "Error details")
        )
    return joinLine(...parts)
}
/**
 * Returns the placeholder HTML shown while the tests are still running.
 */
export const getRunningMessage = () =>
    joinLine(
        wrap("h4", ":hourglass_flowing_sand: Hey there! :wave: We're currently testing your plugin."),
        wrap("p", "The testing process may take some time, but we'll update this message once it's complete.")
    )
| export const getTestSummary = (results: PacketTestEnd[], startedAt: number, finishedAt: number) => { |
const elapsed = (finishedAt - startedAt) / 1000
const {
total,
passed,
failures,
skipped,
cancelled
} = extractTestResults(results)
return joinLine(
getSummaryHeader(total, elapsed, passed, failures, skipped, cancelled),
"<hr />",
wrap("h2", "Details")
)
}
/**
 * Renders every test result as one row of an HTML table. When `minimize`
 * is true the table is wrapped in a collapsible <details> element labelled
 * "Full test results".
 */
export const getTestResultTable = (results: PacketTestEnd[], minimize = false) => {
    const headerCells = [" ", "Test", "Cause", "State", "Started at", "Finished at", "Elapsed", "Test description"]
        .map((label) => wrap("th", label))
    const header = wrap("thead", joinLine(wrap("tr", joinLine(...headerCells))))
    const rows = results.map((result) => {
        const { cause, state, scenario, startedAt, finishedAt } = result
        const cells = [
            getEmojiForCause(cause),
            scenario.name,
            cause,
            state,
            new Date(startedAt).toLocaleString(),
            new Date(finishedAt).toLocaleString(),
            `${Math.ceil((finishedAt - startedAt) / 1000)} sec`,
            scenario.description || "No description"
        ]
        return wrap("tr", joinLine(...cells.map((cell) => wrap("td", cell))))
    })
    const body = wrap("tbody", joinLine(...rows))
    const table = wrap("table", joinLine(header, body))
    if (minimize)
        return wrap("details", joinLine(wrap("summary", "Full test results"), table))
    return table
}
/**
 * Chooses a random headline matching the session outcome and appends the
 * run counters. Outcome precedence: empty session -> "no tests"; everything
 * passed/skipped -> "passed"; zero failures -> "no tests"; failures within
 * the configured threshold -> "passed with threshold"; otherwise "failed".
 */
const getSummaryHeader = (total: number, elapsed: number, passed: number, failures: number, skipped: number, cancelled: number) => {
    const threshold = getArguments().failThreshold
    let messageSource: string[]
    // An empty session previously matched the "all passed" branch
    // (0 === 0 + 0) and reported "All tests passed!"; report it as
    // "no tests" instead.
    if (total === 0) messageSource = MESSAGES_NO_TESTS
    else if (total === passed + skipped) messageSource = MESSAGES_PASSED
    else if (failures === 0) messageSource = MESSAGES_NO_TESTS
    else if (failures <= threshold) messageSource = MESSAGES_PASSED_WITH_THRESHOLD
    else messageSource = MESSAGES_FAILED
    // Pick one headline at random from the selected pool.
    const summaryText = messageSource[Math.floor(Math.random() * messageSource.length)]
    return joinLine(
        wrap("h4", summaryText),
        "<br />",
        wrap("p", join(", ",
            `Tests run: ${total}`,
            `Failures: ${failures}`,
            `Skipped: ${skipped}`,
            `Cancelled: ${cancelled}`,
            `Time elapsed: ${elapsed} sec`
        ))
    )
}
/**
 * Formats a Scenamatica daemon exception (type, message, stack trace) as
 * an HTML <pre><code> block.
 */
export const getExceptionString = (errorType: string, errorMessage: string, errorStackTrace: string[]) => {
    const lines = [
        "An unexpected error has occurred while running Scenamatica daemon:",
        `${errorType}: ${errorMessage}`,
        ...errorStackTrace.map((s) => `    at ${s}`)
    ]
    return wrap("pre", wrap("code", joinLine(...lines)))
}
/**
 * Builds the "Reporting bugs" section linking to the Scenamatica issue
 * tracker, followed by the environment-information block.
 */
export const getReportingMessage = () => {
    const reportLink = wrap("a", "Scenamatica", { href: REPORT_URL })
    const body = combine(
        "If you believe this is a bug, please report it to ",
        reportLink,
        " along with the contents of this error message, the above stack trace, and the environment information listed below."
    )
    return joinLine(wrap("h2", "Reporting bugs"), wrap("p", body), getEnvInfoMessage())
}
/** Returns the report footer: a divider followed by the license note. */
export const getFooter = () => joinLine("<hr />", getLicenseMessage())
/**
 * Renders a collapsible block listing component versions (Scenamatica,
 * Minecraft, Java, Node.js) and runner platform details (OS, arch).
 */
const getEnvInfoMessage = () => {
    const runtime = getArguments()
    const envInfo = [
        "+ Versions:",
        `  - Scenamatica: ${runtime.scenamaticaVersion}`,
        `  - Minecraft: ${runtime.mcVersion}`,
        `  - Java: ${runtime.javaVersion}`,
        `  - Node.js: ${process.version}`,
        "+ Runner:",
        `  - OS: ${process.platform}`,
        `  - Arch: ${process.arch}`,
    ]
    return wrap("details", joinLine(
        wrap("summary", "Environment Information"),
        wrap("pre", wrap("code", envInfo.join("\n")))
    ))
}
/**
 * Builds the license footnote crediting Scenamatica and the MIT License.
 */
const getLicenseMessage = () => {
    const scenamaticaLink = wrap("a", "Scenamatica", { href: "https://github.com/TeamKUN/Scenamatica" })
    const licenseLink = wrap("a", "MIT License", { href: "https://github.com/TeamKUN/Scenamatica/blob/main/LICENSE" })
    return joinLine(
        wrap("h2", "License"),
        wrap("small", `This test report has been generated by ${scenamaticaLink} and licensed under ${licenseLink}.`),
        "<br />",
        wrap("small", "You can redistribute it and/or modify it under the terms of the MIT License.")
    )
}
/**
 * Wraps text in an HTML element with optional attributes.
 *
 * @param tag   element name, e.g. "p" or "a"
 * @param text  inner HTML (not escaped by this helper)
 * @param props attribute name/value pairs rendered as key="value"
 * @returns the serialized element
 */
const wrap = (tag: string, text: string, props: { [key: string]: string } = {}) => {
    const attributes = Object.entries(props)
        .map(([key, value]) => `${key}="${value}"`)
        .join(" ")
    // Only insert a space before the attribute list when one exists;
    // previously attribute-less calls produced malformed tags like "<h1 >".
    return attributes.length > 0
        ? `<${tag} ${attributes}>${text}</${tag}>`
        : `<${tag}>${text}</${tag}>`
}
/** Concatenates the given fragments with newline separators. */
const joinLine = (...texts: string[]) => texts.join("\n")
/** Concatenates the given fragments with the supplied delimiter. */
const join = (delimiter: string, ...texts: string[]) => texts.join(delimiter)
/** Concatenates the given fragments with no separator. */
const combine = (...texts: string[]) => texts.join("")
| src/outputs/messages.ts | TeamKun-scenamatica-action-6f66283 | [
{
"filename": "src/outputs/pull-request/appender.ts",
"retrieved_chunk": "}\nexport const reportSessionEnd = (packet: PacketSessionEnd) => {\n const {results, finishedAt, startedAt} = packet\n appendHeaderIfNotPrinted()\n outMessage += `${getTestSummary(results, startedAt, finishedAt)}\n ${getTestResultTable(results, true)}\n `\n}\nconst appendHeaderIfNotPrinted = () => {\n if (!headerPrinted) {",
"score": 19.260350831742443
},
{
"filename": "src/logging.ts",
"retrieved_chunk": " cause: TestResultCause,\n startedAt: number,\n finishedAt: number,\n): void => {\n const elapsed = `${finishedAt - startedAt} ms`\n const emoji = getEmojiForCause(cause)\n switch (cause) {\n case TestResultCause.CANCELLED: {\n info(`${emoji} The test ${name} is cancelled with state ${state} in ${elapsed}.`)\n break",
"score": 15.558092982399511
},
{
"filename": "src/logging.ts",
"retrieved_chunk": " }\n}\nexport const logSessionStart = (startedAt: number, tests: number): void => {\n info(\"--------------------------------------\")\n info(\" T E S T S\")\n info(\"--------------------------------------\")\n info(`The session is started at ${startedAt}, ${tests} tests are marked to be run.`)\n}\nexport const logSessionEnd = (sessionEnd: PacketSessionEnd): void => {\n const elapsed = `${Math.ceil((sessionEnd.finishedAt - sessionEnd.startedAt) / 1000)} sec`",
"score": 15.521661808315775
},
{
"filename": "src/server/client.ts",
"retrieved_chunk": " incomingBuffer = messages.slice(1).join(\"\\n\") || undefined\n if (!await processPacket(messages[0]))\n info(messages[0])\n }\n}\nexport const kill = () => {\n alive = false\n}\nconst processPacket = async (msg: string) => {\n if (!alive) {",
"score": 15.482235187864251
},
{
"filename": "src/packets.ts",
"retrieved_chunk": " public results: PacketTestEnd[],\n public startedAt: number,\n public finishedAt: number,\n ) {}\n}\nexport class PacketScenamaticaError implements Packet<PacketScenamaticaError> {\n public genre = \"general\"\n public type = \"error\"\n public constructor(public date: number, public exception: string, public message: string, public stackTrace: string[]) {}\n}",
"score": 15.334279350273114
}
] | typescript | export const getTestSummary = (results: PacketTestEnd[], startedAt: number, finishedAt: number) => { |
import {
DocumentDuplicateIcon as CopyIcon,
InformationCircleIcon as InfoIcon,
} from "@heroicons/react/24/outline";
import va from "@vercel/analytics";
import {
PropsWithChildren,
useCallback,
useEffect,
useMemo,
useState,
} from "react";
import { Prism as SyntaxHighlighter } from "react-syntax-highlighter";
import { Model } from "../data/modelMetadata";
import GitHubIcon from "./GitHubIcon";
/** Props accepted by the ModelCard modal. */
export interface ModelCardProps {
  // Whether the modal is currently shown (toggles the "modal-open" class).
  visible: boolean;
  // Invoked when the user dismisses the modal (Done button / backdrop).
  onDismiss: () => void;
  // The model whose API endpoint and code snippets are displayed.
  model: Model;
}

// Identifiers of the code-snippet language tabs.
type Tabs = "python" | "js" | "curl";
/**
 * Modal dialog presenting a model's API details: the HTTP endpoint (with
 * copy-to-clipboard), a GitHub sign-in call to action, and example code
 * snippets in Python, JavaScript, and cURL.
 */
export default function ModelCard(props: PropsWithChildren<ModelCardProps>) {
  const { model, onDismiss, visible } = props;

  // Currently selected code-snippet language tab.
  const [activeTab, setActiveTab] = useState<Tabs>("python");
  const selectTab = (tab: Tabs) => () => {
    setActiveTab(tab);
  };

  // Lazily load the syntax-highlighting theme. The empty dependency array
  // makes the dynamic import run once on mount; previously no dependency
  // array was supplied, so the module was re-imported (and state re-set)
  // on every render.
  const [style, setStyle] = useState({});
  useEffect(() => {
    import("react-syntax-highlighter/dist/esm/styles/prism/material-dark").then(
      (mod) => setStyle(mod.default)
    );
  }, []);

  const modalClassName = [
    "modal max-md:w-full max-md:modal-bottom",
    visible ? "modal-open" : "",
  ];

  // Copies the API endpoint to the system clipboard.
  const copyEndpoint = useCallback(() => {
    navigator.clipboard.writeText(model.apiEndpoint);
  }, [model.apiEndpoint]);

  // Selects the whole endpoint text when the read-only input is clicked.
  const selectOnClick = useCallback(
    (event: React.MouseEvent<HTMLInputElement>) => {
      event.currentTarget.select();
    },
    []
  );

  // Returns the daisyUI class that highlights the active tab.
  const isTabSelected = useCallback(
    (tab: Tabs) => {
      return activeTab === tab ? "tab-active" : "";
    },
    [activeTab]
  );

  // Code snippet matching the selected language tab.
  const code = useMemo(() => {
    switch (activeTab) {
      case "python":
        return model.pythonCode;
      case "js":
        return model.jsCode;
      case "curl":
        return model.curlCode;
    }
  }, [activeTab, model]);

  return (
    <dialog className={modalClassName.join(" ")}>
      <div className="modal-box max-w-full w-2/4">
        <div className="prose w-full max-w-full">
          <h3>{model.name}</h3>
          <div className="my-10">
            <div className="form-control">
              <label className="label">
                <span className="label-text font-medium text-lg">
                  API Endpoint
                </span>
              </label>
              <div className="join">
                <input
                  className="input input-bordered w-full min-w-fit max-w-full join-item cursor-default"
                  onClick={selectOnClick}
                  readOnly
                  value={model.apiEndpoint}
                />
                <button className="btn join-item" onClick={copyEndpoint}>
                  <CopyIcon className="w-5 h-5" />
                </button>
              </div>
            </div>
            <div className="rounded-md bg-base-200 border border-base-content/10 p-4 my-6">
              <p className="text-lg font-bold space-x-2">
                <InfoIcon className="stroke-info w-8 h-8 inline-block" />
                <span className="text-info-content dark:text-info">
                  You can call this API right now!
                </span>
              </p>
              <p>
                You can use this model in your application through our API. All
                you need to do is to sign in and get a token.
              </p>
              <p>
                <a href="https://youtu.be/jV6cP0PyRY0">
                  Watch this tutorial to help you get started!
                </a>
              </p>
              <div className="text-center">
                <a
                  className="btn btn-outline btn-active"
                  href="https://serverless.fal.ai/api/auth/login"
                  target="_blank"
                  onClick={() => {
                    va.track("github-login");
                  }}
                >
                  <GitHubIcon />{" "}
                  <span className="ms-3">
                    {" "}
                    Sign in with Github to get a token{" "}
                  </span>
                </a>
              </div>
            </div>
          </div>
        </div>
        <div>
          <div className="tabs w-full text-lg">
            <a
              className={`tab tab-lifted ${isTabSelected("python")}`}
              onClick={selectTab("python")}
            >
              Python
            </a>
            <a
              className={`tab tab-lifted ${isTabSelected("js")}`}
              onClick={selectTab("js")}
            >
              JavaScript
            </a>
            <a
              className={`tab tab-lifted ${isTabSelected("curl")}`}
              onClick={selectTab("curl")}
            >
              cURL
            </a>
          </div>
          <SyntaxHighlighter language={activeTab} style={style}>
            {code.trim()}
          </SyntaxHighlighter>
        </div>
        <div className="modal-action">
          <button className="btn btn-outline" onClick={onDismiss}>
            Done
          </button>
        </div>
      </div>
      <form method="dialog" className="modal-backdrop bg-black bg-opacity-50">
        <button onClick={onDismiss}>close</button>
      </form>
    </dialog>
  );
}
| src/components/ModelCard.tsx | fal-ai-edit-anything-app-4e32d65 | [
{
"filename": "src/pages/_navbar.tsx",
"retrieved_chunk": " >\n fal-serverless\n </a>\n </span>\n </div>\n <div className=\"flex\">\n <a\n href=\"https://github.com/fal-ai/edit-anything-app\"\n target=\"_blank\"\n className=\"opacity-40 hover:opacity-70 dark:opacity-60 dark:hover:opacity-90 transition-opacity duration-200 pe-2 md:pe-0\"",
"score": 30.107977465593404
},
{
"filename": "src/pages/_footer.tsx",
"retrieved_chunk": "import GitHubIcon from \"@/components/GitHubIcon\";\nexport default function Footer() {\n return (\n <footer className=\"footer footer-center p-4 container mx-auto gap-2 md:gap-4\">\n <div className=\"prose\">\n <p>\n Copyright © 2023 - All right reserved -{\" \"}\n <a href=\"https://fal.ai\" className=\"link\" target=\"_blank\">\n fal.ai\n </a>",
"score": 28.033751952356493
},
{
"filename": "src/pages/index.tsx",
"retrieved_chunk": " <strong>Hint:</strong> click on the image to set the\n mask reference point\n </span>\n )}\n <button\n className=\"btn btn-outline btn-sm self-end\"\n onClick={reset}\n disabled={isLoading}\n >\n Reset",
"score": 20.036630087751178
},
{
"filename": "src/pages/index.tsx",
"retrieved_chunk": " onSelect={handleModelSelected}\n selectedModel={selectedModel}\n />\n </div>\n <div className=\"hidden md:flex items-end justify-end\">\n <button\n className=\"btn btn-outline\"\n onClick={() => setShowModelDetails(true)}\n >\n <CodeBracketIcon className=\"h-6 w-6\" />",
"score": 18.640134278845448
},
{
"filename": "src/pages/_footer.tsx",
"retrieved_chunk": " className=\"opacity-40 hover:opacity-70\"\n target=\"_blank\"\n >\n <GitHubIcon />\n </a>\n </div>\n </footer>\n );\n}",
"score": 18.188492689732897
}
] | typescript | GitHubIcon />{" "} |
import NextImage from "next/image";
import Card from "./Card";
import EmptyMessage from "./EmptyMessage";
/** Props for the replace/remove/fill tab selector. */
interface StableDiffusionButtonGroupProps {
  // Notifies the parent that a different tab was chosen.
  setActiveTab: (tab: string) => void;
  // Identifier of the currently active tab ("replace" | "remove" | "fill").
  activeTab: string;
}
/**
 * Segmented button group for choosing the Stable Diffusion operation
 * (replace / remove / fill). Highlights the active tab and notifies the
 * parent via setActiveTab.
 */
export const StableDiffusionOptionsButtonGroup = (
  props: StableDiffusionButtonGroupProps
) => {
  const { setActiveTab, activeTab } = props;
  // Use the destructured activeTab; previously this inconsistently read
  // props.activeTab, leaving the local binding unused.
  const tabClass = (tabName: string) =>
    activeTab === tabName ? "btn-primary" : "";
  return (
    <div className="max-md:px-2 flex container mx-auto pt-8 w-full">
      <div className="join">
        <button
          onClick={() => setActiveTab("replace")}
          className={`btn ${tabClass("replace")} join-item`}
        >
          Replace
        </button>
        <button
          onClick={() => setActiveTab("remove")}
          className={`btn ${tabClass("remove")} join-item`}
        >
          Remove
        </button>
        <button
          onClick={() => setActiveTab("fill")}
          className={`btn ${tabClass("fill")} join-item`}
        >
          Fill
        </button>
      </div>
    </div>
  );
};
/** Props for the StableDiffusionInput panel (tab selector + per-tab cards). */
interface StableDiffusionInputProps {
  // Tab selection, forwarded to StableDiffusionOptionsButtonGroup.
  setActiveTab: (tab: string) => void;
  activeTab: string;
  // Prompt state for the "replace" tab.
  setPrompt: (prompt: string) => void;
  // Prompt state for the "fill" tab.
  setFillPrompt: (prompt: string) => void;
  prompt: string;
  fillPrompt: string;
  // Disables inputs and action buttons while a request is in flight.
  isLoading: boolean;
  // Currently selected mask; action buttons are disabled when null.
  selectedMask: string | null;
  // Truthy when the corresponding prompt is non-empty — TODO confirm against caller.
  hasPrompt: boolean | string;
  hasFillPrompt: boolean | string;
  // Action callbacks for each operation.
  handleReplace: () => void;
  handleRemove: () => void;
  handleFill: () => void;
  // Result image URLs per operation, rendered in the respective card.
  replacedImageUrls: string[];
  removedImageUrls: string[];
  filledImageUrls: string[];
}
/**
 * Renders the Stable Diffusion operation UI: the replace/remove/fill tab
 * selector plus, per tab, a prompt input (replace/fill only), an action
 * button, an empty-state placeholder, and the grid of generated images.
 * Only the active tab's card is mounted.
 */
export const StableDiffusionInput = (props: StableDiffusionInputProps) => {
  const {
    activeTab,
    setActiveTab,
    setPrompt,
    prompt,
    fillPrompt,
    hasFillPrompt,
    isLoading,
    handleReplace,
    handleRemove,
    handleFill,
    setFillPrompt,
    selectedMask,
    hasPrompt,
    replacedImageUrls,
    removedImageUrls,
    filledImageUrls,
  } = props;
  return (
    <div>
      <StableDiffusionOptionsButtonGroup
        activeTab={activeTab}
        setActiveTab={setActiveTab}
      />
      {activeTab === "replace" && (
        <div className="container mx-auto pt-8 w-full">
          <Card title="Replace...">
            <div className="flex flex-col md:flex-row md:space-x-6">
              <div className="form-control w-full md:w-3/5 max-w-full">
                <label>
                  <input
                    id="prompt_input"
                    type="text"
                    name="prompt"
                    value={prompt}
                    onChange={(e) => setPrompt(e.target.value)}
                    placeholder="something creative, like 'a bus on the moon'"
                    className="input placeholder-gray-400 dark:placeholder-gray-600 w-full"
                    disabled={isLoading}
                  />
                </label>
              </div>
              <button
                className="btn btn-primary max-sm:btn-wide mt-4 mx-auto md:mx-0 md:mt-0"
                disabled={isLoading || !selectedMask || !hasPrompt}
                onClick={handleReplace}
              >
                {selectedMask ? "Generate" : "Pick one of the mask options"}
              </button>
            </div>
            {replacedImageUrls.length === 0 && (
              <div className="my-12">
                <EmptyMessage message="Nothing to see just yet" />
              </div>
            )}
            <div className="grid grid-cols-1 gap-4 mt-4 md:mt-6 lg:p-12 mx-auto">
              {replacedImageUrls.map((url, index) => (
                <NextImage
                  key={index}
                  src={url}
                  alt={`Generated Image ${index + 1}`}
                  width={0}
                  height={0}
                  sizes="100vw"
                  style={{ width: "100%", height: "auto" }}
                  className="my-0"
                />
              ))}
            </div>
          </Card>
        </div>
      )}
      {activeTab === "remove" && (
        <div className="container mx-auto pt-8 w-full">
          <Card title="Remove...">
            <div className="flex flex-col md:flex-row md:space-x-6">
              <button
                className="btn btn-primary max-sm:btn-wide mt-4 mx-auto md:mx-0 md:mt-0"
                disabled={isLoading || !selectedMask}
                onClick={handleRemove}
              >
                {selectedMask ? "Remove" : "Pick one of the mask options"}
              </button>
            </div>
            {removedImageUrls.length === 0 && (
              <div className="my-12">
                <EmptyMessage message="Nothing to see just yet" />
              </div>
            )}
            <div className="grid grid-cols-1 gap-4 mt-4 md:mt-6 lg:p-12 mx-auto">
              {removedImageUrls.map((url, index) => (
                <NextImage
                  key={index}
                  src={url}
                  alt={`Generated Image ${index + 1}`}
                  width={0}
                  height={0}
                  sizes="100vw"
                  style={{ width: "100%", height: "auto" }}
                  className="my-0"
                />
              ))}
            </div>
          </Card>
        </div>
      )}
      {activeTab === "fill" && (
        <div className="container mx-auto pt-8 w-full">
          <Card title="Fill...">
            <div className="flex flex-col md:flex-row md:space-x-6">
              <div className="form-control w-full md:w-3/5 max-w-full">
                <label>
                  <input
                    id="fill_prompt_input"
                    type="text"
                    name="fill_prompt"
                    value={fillPrompt}
                    onChange={(e) => setFillPrompt(e.target.value)}
                    placeholder="something creative, like 'an alien'"
                    className="input placeholder-gray-400 dark:placeholder-gray-600 w-full"
                    disabled={isLoading}
                  />
                </label>
              </div>
              <button
                className="btn btn-primary max-sm:btn-wide mt-4 mx-auto md:mx-0 md:mt-0"
                disabled={isLoading || !selectedMask || !hasFillPrompt}
                onClick={handleFill}
              >
                {selectedMask ? "Fill" : "Pick one of the mask options"}
              </button>
            </div>
            {filledImageUrls.length === 0 && (
              <div className="my-12">
                <EmptyMessage message="Nothing to see just yet" />
              </div>
            )}
            <div className="grid grid-cols-1 gap-4 mt-4 md:mt-6 lg:p-12 mx-auto">
              {filledImageUrls.map((url, index) => (
                <NextImage
                  key={index}
                  src={url}
                  alt={`Generated Image ${index + 1}`}
                  width={0}
                  height={0}
                  sizes="100vw"
                  style={{ width: "100%", height: "auto" }}
                  className="my-0"
                />
              ))}
            </div>
          </Card>
        </div>
      )}
    </div>
  );
};
| src/components/StableDiffusion.tsx | fal-ai-edit-anything-app-4e32d65 | [
{
"filename": "src/components/MaskPicker.tsx",
"retrieved_chunk": " value={dilation}\n onChange={(e) => setDilation(parseInt(e.target.value))} // @ts-nocheck\n className=\"input placeholder-gray-400 dark:placeholder-gray-600 w-full\"\n disabled={isLoading}\n />\n </label>\n {displayMasks.length === 0 && (\n <div className=\"items-center mt-0 md:mt-12\">\n <div className=\"hidden md:display\">\n <EmptyMessage message=\"No masks generated yet\" />",
"score": 20.641375628024985
},
{
"filename": "src/components/MaskPicker.tsx",
"retrieved_chunk": " </div>\n <div className=\"flex flex-col items-center\">\n <button\n className=\"btn btn-primary max-sm:btn-wide mb-4 md:mb-0\"\n disabled={isLoading || !selectedImage || !position}\n onClick={generateMasks}\n >\n {position ? \"Generate masks\" : \"Set the mask reference point\"}\n </button>\n </div>",
"score": 20.28000620831363
},
{
"filename": "src/pages/index.tsx",
"retrieved_chunk": " handleReplace={handleReplace}\n handleRemove={handleRemove}\n handleFill={handleFill}\n setFillPrompt={setFillPrompt}\n selectedMask={selectedMask}\n hasPrompt={hasPrompt}\n replacedImageUrls={replacedImageUrls}\n removedImageUrls={removedImageUrls}\n filledImageUrls={filledImageUrls}\n />",
"score": 18.226658493750975
},
{
"filename": "src/pages/index.tsx",
"retrieved_chunk": " <strong>Hint:</strong> click on the image to set the\n mask reference point\n </span>\n )}\n <button\n className=\"btn btn-outline btn-sm self-end\"\n onClick={reset}\n disabled={isLoading}\n >\n Reset",
"score": 14.947349109769643
},
{
"filename": "src/components/MaskPicker.tsx",
"retrieved_chunk": " </div>\n )}\n {displayMasks.length > 0 && (\n <>\n {props.selectedModel.id === \"sam\" && (\n <span className=\"font-light mb-0 inline-block opacity-70\">\n <strong>Hint:</strong> click on the image select a mask\n </span>\n )}\n <div className=\"grid grid-cols-1 space-y-2\">",
"score": 14.682443629283538
}
] | typescript | <EmptyMessage message="Nothing to see just yet" />
</div>
)} |
import { ImageFile } from "@/data/image";
import { Model } from "@/data/modelMetadata";
import { PropsWithChildren } from "react";
import Card from "./Card";
import EmptyMessage from "./EmptyMessage";
import ImageMask from "./ImageMask";
export interface MaskPickerProps {
displayMasks: string[];
masks: string[];
dilation: number;
isLoading: boolean;
setDilation: (dilation: number) => void;
selectedImage: ImageFile | null;
position: { x: number; y: number } | null;
generateMasks: () => void;
selectedMask: string | null;
handleMaskSelected: (mask: string) => void;
selectedModel: Model;
}
export default function MaskPicker(props: PropsWithChildren<MaskPickerProps>) {
const {
displayMasks,
masks,
dilation,
isLoading,
setDilation,
selectedImage,
position,
generateMasks,
selectedMask,
handleMaskSelected,
} = props;
return (
<Card title="Masks" classNames="min-h-full">
<label>
Dilation:
<input
id="mask_dilation"
type="number"
name="dilation"
value={dilation}
onChange={(e) => setDilation(parseInt(e.target.value))} // @ts-nocheck
className="input placeholder-gray-400 dark:placeholder-gray-600 w-full"
disabled={isLoading}
/>
</label>
{displayMasks.length === 0 && (
<div className="items-center mt-0 md:mt-12">
<div className="hidden md:display">
<EmptyMessage message="No masks generated yet" />
</div>
<div className="flex flex-col items-center">
<button
className="btn btn-primary max-sm:btn-wide mb-4 md:mb-0"
disabled={isLoading || !selectedImage || !position}
onClick={generateMasks}
>
{position ? "Generate masks" : "Set the mask reference point"}
</button>
</div>
</div>
)}
{displayMasks.length > 0 && (
<>
{props.selectedModel.id === "sam" && (
<span className="font-light mb-0 inline-block opacity-70">
<strong>Hint:</strong> click on the image select a mask
</span>
)}
<div className="grid grid-cols-1 space-y-2">
{displayMasks.map((mask, index) => (
< | ImageMask
key={index} |
alt={`Mask ${index}`}
mask={mask}
selected={selectedMask === mask}
onClick={handleMaskSelected}
/>
))}
</div>
<button
className="btn btn-primary max-sm:btn-wide mb-4 md:mb-0"
disabled={isLoading || !selectedImage || !position}
onClick={generateMasks}
>
{position ? "Regenerate" : "Set the mask reference point"}
</button>
</>
)}
</Card>
);
}
| src/components/MaskPicker.tsx | fal-ai-edit-anything-app-4e32d65 | [
{
"filename": "src/pages/index.tsx",
"retrieved_chunk": " <ImageSelector\n onImageSelect={handleImageSelected}\n disabled={isLoading}\n />\n )}\n {selectedImage && (\n <>\n <div className=\"flex justify-between\">\n {selectedModel.id === \"sam\" && (\n <span className=\"font-light mb-0 inline-block opacity-70\">",
"score": 45.294744591010314
},
{
"filename": "src/pages/index.tsx",
"retrieved_chunk": " <strong>Hint:</strong> click on the image to set the\n mask reference point\n </span>\n )}\n <button\n className=\"btn btn-outline btn-sm self-end\"\n onClick={reset}\n disabled={isLoading}\n >\n Reset",
"score": 43.594703227547654
},
{
"filename": "src/components/StableDiffusion.tsx",
"retrieved_chunk": " </div>\n )}\n <div className=\"grid grid-cols-1 gap-4 mt-4 md:mt-6 lg:p-12 mx-auto\">\n {replacedImageUrls.map((url, index) => (\n <NextImage\n key={index}\n src={url}\n alt={`Generated Image ${index + 1}`}\n width={0}\n height={0}",
"score": 37.34372938865635
},
{
"filename": "src/pages/_navbar.tsx",
"retrieved_chunk": " <span className=\"whitespace-nowrap dark:text-white font-light\">\n edit\n <span className=\"text-secondary font-normal\">anything</span>\n </span>\n </a>\n <span className=\"text-xs md:text-sm font-light md:ms-1 max-md:ps-2 inline-block\">\n <span className=\"opacity-70\">model inference provided by </span>\n <a\n className=\"link font-medium opacity-70 hover:opacity-100 transition-opacity duration-200\"\n href=\"https://docs.fal.ai/fal-serverless/quickstart\"",
"score": 35.70953702663756
},
{
"filename": "src/components/StableDiffusion.tsx",
"retrieved_chunk": " <div className=\"my-12\">\n <EmptyMessage message=\"Nothing to see just yet\" />\n </div>\n )}\n <div className=\"grid grid-cols-1 gap-4 mt-4 md:mt-6 lg:p-12 mx-auto\">\n {filledImageUrls.map((url, index) => (\n <NextImage\n key={index}\n src={url}\n alt={`Generated Image ${index + 1}`}",
"score": 33.59563491934483
}
] | typescript | ImageMask
key={index} |
import {
DocumentDuplicateIcon as CopyIcon,
InformationCircleIcon as InfoIcon,
} from "@heroicons/react/24/outline";
import va from "@vercel/analytics";
import {
PropsWithChildren,
useCallback,
useEffect,
useMemo,
useState,
} from "react";
import { Prism as SyntaxHighlighter } from "react-syntax-highlighter";
import { Model } from "../data/modelMetadata";
import GitHubIcon from "./GitHubIcon";
export interface ModelCardProps {
visible: boolean;
onDismiss: () => void;
model: Model;
}
type Tabs = "python" | "js" | "curl";
export default function ModelCard(props: PropsWithChildren<ModelCardProps>) {
const { model, onDismiss, visible } = props;
const [activeTab, setActiveTab] = useState<Tabs>("python");
const selectTab = (tab: Tabs) => () => {
setActiveTab(tab);
};
const [style, setStyle] = useState({});
useEffect(() => {
import("react-syntax-highlighter/dist/esm/styles/prism/material-dark").then(
(mod) => setStyle(mod.default)
);
});
const modalClassName = [
"modal max-md:w-full max-md:modal-bottom",
visible ? "modal-open" : "",
];
const copyEndpoint = useCallback(() => {
navigator.clipboard.writeText(model.apiEndpoint);
}, [model.apiEndpoint]);
const selectOnClick = useCallback(
(event: React.MouseEvent<HTMLInputElement>) => {
event.currentTarget.select();
},
[]
);
const isTabSelected = useCallback(
(tab: Tabs) => {
return activeTab === tab ? "tab-active" : "";
},
[activeTab]
);
const code = useMemo(() => {
switch (activeTab) {
case "python":
return model.pythonCode;
case "js":
return model.jsCode;
case "curl":
| return model.curlCode; |
}
}, [activeTab, model]);
return (
<dialog className={modalClassName.join(" ")}>
<div className="modal-box max-w-full w-2/4">
<div className="prose w-full max-w-full">
<h3>{model.name}</h3>
<div className="my-10">
<div className="form-control">
<label className="label">
<span className="label-text font-medium text-lg">
API Endpoint
</span>
</label>
<div className="join">
<input
className="input input-bordered w-full min-w-fit max-w-full join-item cursor-default"
onClick={selectOnClick}
readOnly
value={model.apiEndpoint}
/>
<button className="btn join-item" onClick={copyEndpoint}>
<CopyIcon className="w-5 h-5" />
</button>
</div>
</div>
<div className="rounded-md bg-base-200 border border-base-content/10 p-4 my-6">
<p className="text-lg font-bold space-x-2">
<InfoIcon className="stroke-info w-8 h-8 inline-block" />
<span className="text-info-content dark:text-info">
You can call this API right now!
</span>
</p>
<p>
You can use this model in your application through our API. All
you need to do is to sign in and get a token.
</p>
<p>
<a href="https://youtu.be/jV6cP0PyRY0">
Watch this tutorial to help you get started!
</a>
</p>
<div className="text-center">
<a
className="btn btn-outline btn-active"
href="https://serverless.fal.ai/api/auth/login"
target="_blank"
onClick={() => {
va.track("github-login");
}}
>
<GitHubIcon />{" "}
<span className="ms-3">
{" "}
Sign in with Github to get a token{" "}
</span>
</a>
</div>
</div>
</div>
</div>
<div>
<div className="tabs w-full text-lg">
<a
className={`tab tab-lifted ${isTabSelected("python")}`}
onClick={selectTab("python")}
>
Python
</a>
<a
className={`tab tab-lifted ${isTabSelected("js")}`}
onClick={selectTab("js")}
>
JavaScript
</a>
<a
className={`tab tab-lifted ${isTabSelected("curl")}`}
onClick={selectTab("curl")}
>
cURL
</a>
</div>
<SyntaxHighlighter
text={code.trim()}
language={activeTab}
style={style}
>
{code.trim()}
</SyntaxHighlighter>
</div>
<div className="modal-action">
<button className="btn btn-outline" onClick={onDismiss}>
Done
</button>
</div>
</div>
<form method="dialog" className="modal-backdrop bg-black bg-opacity-50">
<button onClick={onDismiss}>close</button>
</form>
</dialog>
);
}
| src/components/ModelCard.tsx | fal-ai-edit-anything-app-4e32d65 | [
{
"filename": "src/components/ModelPicker.tsx",
"retrieved_chunk": " <span className=\"label-text\">Select a model</span>\n </label>\n <select\n className=\"select select-bordered max-w-xs\"\n onChange={handleOnModelSelect}\n value={selectedModel.id}\n >\n {Object.values(models).map((model) => (\n <option key={model.id} value={model.id}>\n {model.name}",
"score": 20.58224972827901
},
{
"filename": "src/data/modelMetadata.ts",
"retrieved_chunk": ")\n `,\n jsCode: \"\",\n curlCode: \"\",\n};\nconst segmentAnything: Model = {\n id: \"sam\",\n name: \"Segment Anything\",\n apiEndpoint: process.env.NEXT_PUBLIC_MASK_FUNCTION_URL || \"\",\n pythonCode: `",
"score": 14.732036710502225
},
{
"filename": "src/pages/_navbar.tsx",
"retrieved_chunk": "import GitHubIcon from \"@/components/GitHubIcon\";\nexport default function NavBar() {\n return (\n <div className=\"navbar bg-base-300\">\n <div className=\"container mx-auto\">\n <div className=\"flex-1 max-md:flex-col\">\n <a\n className=\"text-lg normal-case tracking-wide ps-2 md:ps-0\"\n href=\"#\"\n >",
"score": 14.489493341245858
},
{
"filename": "src/components/StableDiffusion.tsx",
"retrieved_chunk": " hasPrompt,\n replacedImageUrls,\n removedImageUrls,\n filledImageUrls,\n } = props;\n return (\n <div>\n <StableDiffusionOptionsButtonGroup\n activeTab={activeTab}\n setActiveTab={setActiveTab}",
"score": 13.618794577242777
},
{
"filename": "src/data/modelMetadata.ts",
"retrieved_chunk": "export type Model = {\n id: string;\n name: string;\n apiEndpoint: string;\n pythonCode: string;\n jsCode: string;\n curlCode: string;\n};\nconst regmbModel: Model = {\n id: \"rembg\",",
"score": 13.4572033983033
}
] | typescript | return model.curlCode; |
import {
DocumentDuplicateIcon as CopyIcon,
InformationCircleIcon as InfoIcon,
} from "@heroicons/react/24/outline";
import va from "@vercel/analytics";
import {
PropsWithChildren,
useCallback,
useEffect,
useMemo,
useState,
} from "react";
import { Prism as SyntaxHighlighter } from "react-syntax-highlighter";
import { Model } from "../data/modelMetadata";
import GitHubIcon from "./GitHubIcon";
export interface ModelCardProps {
visible: boolean;
onDismiss: () => void;
model: Model;
}
type Tabs = "python" | "js" | "curl";
export default function ModelCard(props: PropsWithChildren<ModelCardProps>) {
const { model, onDismiss, visible } = props;
const [activeTab, setActiveTab] = useState<Tabs>("python");
const selectTab = (tab: Tabs) => () => {
setActiveTab(tab);
};
const [style, setStyle] = useState({});
useEffect(() => {
import("react-syntax-highlighter/dist/esm/styles/prism/material-dark").then(
(mod) => setStyle(mod.default)
);
});
const modalClassName = [
"modal max-md:w-full max-md:modal-bottom",
visible ? "modal-open" : "",
];
const copyEndpoint = useCallback(() => {
navigator.clipboard.writeText(model.apiEndpoint);
}, [model.apiEndpoint]);
const selectOnClick = useCallback(
(event: React.MouseEvent<HTMLInputElement>) => {
event.currentTarget.select();
},
[]
);
const isTabSelected = useCallback(
(tab: Tabs) => {
return activeTab === tab ? "tab-active" : "";
},
[activeTab]
);
const code = useMemo(() => {
switch (activeTab) {
case "python":
return model.pythonCode;
case "js":
return model.jsCode;
case "curl":
return model.curlCode;
}
}, [activeTab, model]);
return (
<dialog className={modalClassName.join(" ")}>
<div className="modal-box max-w-full w-2/4">
<div className="prose w-full max-w-full">
<h3>{model.name}</h3>
<div className="my-10">
<div className="form-control">
<label className="label">
<span className="label-text font-medium text-lg">
API Endpoint
</span>
</label>
<div className="join">
<input
className="input input-bordered w-full min-w-fit max-w-full join-item cursor-default"
onClick={selectOnClick}
readOnly
value={model.apiEndpoint}
/>
<button className="btn join-item" onClick={copyEndpoint}>
<CopyIcon className="w-5 h-5" />
</button>
</div>
</div>
<div className="rounded-md bg-base-200 border border-base-content/10 p-4 my-6">
<p className="text-lg font-bold space-x-2">
<InfoIcon className="stroke-info w-8 h-8 inline-block" />
<span className="text-info-content dark:text-info">
You can call this API right now!
</span>
</p>
<p>
You can use this model in your application through our API. All
you need to do is to sign in and get a token.
</p>
<p>
<a href="https://youtu.be/jV6cP0PyRY0">
Watch this tutorial to help you get started!
</a>
</p>
<div className="text-center">
<a
className="btn btn-outline btn-active"
href="https://serverless.fal.ai/api/auth/login"
target="_blank"
onClick={() => {
va.track("github-login");
}}
>
| <GitHubIcon />{" "} |
<span className="ms-3">
{" "}
Sign in with Github to get a token{" "}
</span>
</a>
</div>
</div>
</div>
</div>
<div>
<div className="tabs w-full text-lg">
<a
className={`tab tab-lifted ${isTabSelected("python")}`}
onClick={selectTab("python")}
>
Python
</a>
<a
className={`tab tab-lifted ${isTabSelected("js")}`}
onClick={selectTab("js")}
>
JavaScript
</a>
<a
className={`tab tab-lifted ${isTabSelected("curl")}`}
onClick={selectTab("curl")}
>
cURL
</a>
</div>
<SyntaxHighlighter
text={code.trim()}
language={activeTab}
style={style}
>
{code.trim()}
</SyntaxHighlighter>
</div>
<div className="modal-action">
<button className="btn btn-outline" onClick={onDismiss}>
Done
</button>
</div>
</div>
<form method="dialog" className="modal-backdrop bg-black bg-opacity-50">
<button onClick={onDismiss}>close</button>
</form>
</dialog>
);
}
| src/components/ModelCard.tsx | fal-ai-edit-anything-app-4e32d65 | [
{
"filename": "src/pages/_navbar.tsx",
"retrieved_chunk": " >\n fal-serverless\n </a>\n </span>\n </div>\n <div className=\"flex\">\n <a\n href=\"https://github.com/fal-ai/edit-anything-app\"\n target=\"_blank\"\n className=\"opacity-40 hover:opacity-70 dark:opacity-60 dark:hover:opacity-90 transition-opacity duration-200 pe-2 md:pe-0\"",
"score": 32.001329020765546
},
{
"filename": "src/pages/_footer.tsx",
"retrieved_chunk": "import GitHubIcon from \"@/components/GitHubIcon\";\nexport default function Footer() {\n return (\n <footer className=\"footer footer-center p-4 container mx-auto gap-2 md:gap-4\">\n <div className=\"prose\">\n <p>\n Copyright © 2023 - All right reserved -{\" \"}\n <a href=\"https://fal.ai\" className=\"link\" target=\"_blank\">\n fal.ai\n </a>",
"score": 31.943503761157398
},
{
"filename": "src/pages/_navbar.tsx",
"retrieved_chunk": " <span className=\"whitespace-nowrap dark:text-white font-light\">\n edit\n <span className=\"text-secondary font-normal\">anything</span>\n </span>\n </a>\n <span className=\"text-xs md:text-sm font-light md:ms-1 max-md:ps-2 inline-block\">\n <span className=\"opacity-70\">model inference provided by </span>\n <a\n className=\"link font-medium opacity-70 hover:opacity-100 transition-opacity duration-200\"\n href=\"https://docs.fal.ai/fal-serverless/quickstart\"",
"score": 21.376962769713117
},
{
"filename": "src/pages/index.tsx",
"retrieved_chunk": " onSelect={handleModelSelected}\n selectedModel={selectedModel}\n />\n </div>\n <div className=\"hidden md:flex items-end justify-end\">\n <button\n className=\"btn btn-outline\"\n onClick={() => setShowModelDetails(true)}\n >\n <CodeBracketIcon className=\"h-6 w-6\" />",
"score": 20.990436322017867
},
{
"filename": "src/pages/index.tsx",
"retrieved_chunk": " <strong>Hint:</strong> click on the image to set the\n mask reference point\n </span>\n )}\n <button\n className=\"btn btn-outline btn-sm self-end\"\n onClick={reset}\n disabled={isLoading}\n >\n Reset",
"score": 20.808422608725643
}
] | typescript | <GitHubIcon />{" "} |
import {
DocumentDuplicateIcon as CopyIcon,
InformationCircleIcon as InfoIcon,
} from "@heroicons/react/24/outline";
import va from "@vercel/analytics";
import {
PropsWithChildren,
useCallback,
useEffect,
useMemo,
useState,
} from "react";
import { Prism as SyntaxHighlighter } from "react-syntax-highlighter";
import { Model } from "../data/modelMetadata";
import GitHubIcon from "./GitHubIcon";
export interface ModelCardProps {
visible: boolean;
onDismiss: () => void;
model: Model;
}
type Tabs = "python" | "js" | "curl";
export default function ModelCard(props: PropsWithChildren<ModelCardProps>) {
const { model, onDismiss, visible } = props;
const [activeTab, setActiveTab] = useState<Tabs>("python");
const selectTab = (tab: Tabs) => () => {
setActiveTab(tab);
};
const [style, setStyle] = useState({});
useEffect(() => {
import("react-syntax-highlighter/dist/esm/styles/prism/material-dark").then(
(mod) => setStyle(mod.default)
);
});
const modalClassName = [
"modal max-md:w-full max-md:modal-bottom",
visible ? "modal-open" : "",
];
const copyEndpoint = useCallback(() => {
navigator.clipboard.writeText(model.apiEndpoint);
}, [model.apiEndpoint]);
const selectOnClick = useCallback(
(event: React.MouseEvent<HTMLInputElement>) => {
event.currentTarget.select();
},
[]
);
const isTabSelected = useCallback(
(tab: Tabs) => {
return activeTab === tab ? "tab-active" : "";
},
[activeTab]
);
const code = useMemo(() => {
switch (activeTab) {
case "python":
| return model.pythonCode; |
case "js":
return model.jsCode;
case "curl":
return model.curlCode;
}
}, [activeTab, model]);
return (
<dialog className={modalClassName.join(" ")}>
<div className="modal-box max-w-full w-2/4">
<div className="prose w-full max-w-full">
<h3>{model.name}</h3>
<div className="my-10">
<div className="form-control">
<label className="label">
<span className="label-text font-medium text-lg">
API Endpoint
</span>
</label>
<div className="join">
<input
className="input input-bordered w-full min-w-fit max-w-full join-item cursor-default"
onClick={selectOnClick}
readOnly
value={model.apiEndpoint}
/>
<button className="btn join-item" onClick={copyEndpoint}>
<CopyIcon className="w-5 h-5" />
</button>
</div>
</div>
<div className="rounded-md bg-base-200 border border-base-content/10 p-4 my-6">
<p className="text-lg font-bold space-x-2">
<InfoIcon className="stroke-info w-8 h-8 inline-block" />
<span className="text-info-content dark:text-info">
You can call this API right now!
</span>
</p>
<p>
You can use this model in your application through our API. All
you need to do is to sign in and get a token.
</p>
<p>
<a href="https://youtu.be/jV6cP0PyRY0">
Watch this tutorial to help you get started!
</a>
</p>
<div className="text-center">
<a
className="btn btn-outline btn-active"
href="https://serverless.fal.ai/api/auth/login"
target="_blank"
onClick={() => {
va.track("github-login");
}}
>
<GitHubIcon />{" "}
<span className="ms-3">
{" "}
Sign in with Github to get a token{" "}
</span>
</a>
</div>
</div>
</div>
</div>
<div>
<div className="tabs w-full text-lg">
<a
className={`tab tab-lifted ${isTabSelected("python")}`}
onClick={selectTab("python")}
>
Python
</a>
<a
className={`tab tab-lifted ${isTabSelected("js")}`}
onClick={selectTab("js")}
>
JavaScript
</a>
<a
className={`tab tab-lifted ${isTabSelected("curl")}`}
onClick={selectTab("curl")}
>
cURL
</a>
</div>
<SyntaxHighlighter
text={code.trim()}
language={activeTab}
style={style}
>
{code.trim()}
</SyntaxHighlighter>
</div>
<div className="modal-action">
<button className="btn btn-outline" onClick={onDismiss}>
Done
</button>
</div>
</div>
<form method="dialog" className="modal-backdrop bg-black bg-opacity-50">
<button onClick={onDismiss}>close</button>
</form>
</dialog>
);
}
| src/components/ModelCard.tsx | fal-ai-edit-anything-app-4e32d65 | [
{
"filename": "src/components/StableDiffusion.tsx",
"retrieved_chunk": "import NextImage from \"next/image\";\nimport Card from \"./Card\";\nimport EmptyMessage from \"./EmptyMessage\";\ninterface StableDiffusionButtonGroupProps {\n setActiveTab: (tab: string) => void;\n activeTab: string;\n}\nexport const StableDiffusionOptionsButtonGroup = (\n props: StableDiffusionButtonGroupProps\n) => {",
"score": 23.468666366393514
},
{
"filename": "src/components/StableDiffusion.tsx",
"retrieved_chunk": " hasPrompt,\n replacedImageUrls,\n removedImageUrls,\n filledImageUrls,\n } = props;\n return (\n <div>\n <StableDiffusionOptionsButtonGroup\n activeTab={activeTab}\n setActiveTab={setActiveTab}",
"score": 17.340284552188916
},
{
"filename": "src/components/StableDiffusion.tsx",
"retrieved_chunk": " className={`btn ${tabClass(\"fill\")} join-item`}\n >\n Fill\n </button>\n </div>\n </div>\n );\n};\ninterface StableDiffusionInputProps {\n setActiveTab: (tab: string) => void;",
"score": 16.355178374527277
},
{
"filename": "src/pages/index.tsx",
"retrieved_chunk": " <div>\n {selectedModel.id === \"sam\" && (\n <StableDiffusionInput\n activeTab={activeTab}\n setActiveTab={setActiveTab}\n setPrompt={setPrompt}\n prompt={prompt}\n fillPrompt={fillPrompt}\n hasFillPrompt={hasFillPrompt}\n isLoading={isLoading}",
"score": 13.634572084262423
},
{
"filename": "src/components/StableDiffusion.tsx",
"retrieved_chunk": " const { setActiveTab, activeTab } = props;\n const tabClass = (tabName: string) =>\n props.activeTab === tabName ? \"btn-primary\" : \"\";\n return (\n <div className=\"max-md:px-2 flex container mx-auto pt-8 w-full\">\n <div className=\"join\">\n <button\n onClick={() => setActiveTab(\"replace\")}\n className={`btn ${tabClass(\"replace\")} join-item`}\n >",
"score": 13.179082839766824
}
] | typescript | return model.pythonCode; |
import {
DocumentDuplicateIcon as CopyIcon,
InformationCircleIcon as InfoIcon,
} from "@heroicons/react/24/outline";
import va from "@vercel/analytics";
import {
PropsWithChildren,
useCallback,
useEffect,
useMemo,
useState,
} from "react";
import { Prism as SyntaxHighlighter } from "react-syntax-highlighter";
import { Model } from "../data/modelMetadata";
import GitHubIcon from "./GitHubIcon";
export interface ModelCardProps {
visible: boolean;
onDismiss: () => void;
model: Model;
}
type Tabs = "python" | "js" | "curl";
export default function ModelCard(props: PropsWithChildren<ModelCardProps>) {
const { model, onDismiss, visible } = props;
const [activeTab, setActiveTab] = useState<Tabs>("python");
const selectTab = (tab: Tabs) => () => {
setActiveTab(tab);
};
const [style, setStyle] = useState({});
useEffect(() => {
import("react-syntax-highlighter/dist/esm/styles/prism/material-dark").then(
(mod) => setStyle(mod.default)
);
});
const modalClassName = [
"modal max-md:w-full max-md:modal-bottom",
visible ? "modal-open" : "",
];
const copyEndpoint = useCallback(() => {
| navigator.clipboard.writeText(model.apiEndpoint); |
}, [model.apiEndpoint]);
const selectOnClick = useCallback(
(event: React.MouseEvent<HTMLInputElement>) => {
event.currentTarget.select();
},
[]
);
const isTabSelected = useCallback(
(tab: Tabs) => {
return activeTab === tab ? "tab-active" : "";
},
[activeTab]
);
const code = useMemo(() => {
switch (activeTab) {
case "python":
return model.pythonCode;
case "js":
return model.jsCode;
case "curl":
return model.curlCode;
}
}, [activeTab, model]);
return (
<dialog className={modalClassName.join(" ")}>
<div className="modal-box max-w-full w-2/4">
<div className="prose w-full max-w-full">
<h3>{model.name}</h3>
<div className="my-10">
<div className="form-control">
<label className="label">
<span className="label-text font-medium text-lg">
API Endpoint
</span>
</label>
<div className="join">
<input
className="input input-bordered w-full min-w-fit max-w-full join-item cursor-default"
onClick={selectOnClick}
readOnly
value={model.apiEndpoint}
/>
<button className="btn join-item" onClick={copyEndpoint}>
<CopyIcon className="w-5 h-5" />
</button>
</div>
</div>
<div className="rounded-md bg-base-200 border border-base-content/10 p-4 my-6">
<p className="text-lg font-bold space-x-2">
<InfoIcon className="stroke-info w-8 h-8 inline-block" />
<span className="text-info-content dark:text-info">
You can call this API right now!
</span>
</p>
<p>
You can use this model in your application through our API. All
you need to do is to sign in and get a token.
</p>
<p>
<a href="https://youtu.be/jV6cP0PyRY0">
Watch this tutorial to help you get started!
</a>
</p>
<div className="text-center">
<a
className="btn btn-outline btn-active"
href="https://serverless.fal.ai/api/auth/login"
target="_blank"
onClick={() => {
va.track("github-login");
}}
>
<GitHubIcon />{" "}
<span className="ms-3">
{" "}
Sign in with Github to get a token{" "}
</span>
</a>
</div>
</div>
</div>
</div>
<div>
<div className="tabs w-full text-lg">
<a
className={`tab tab-lifted ${isTabSelected("python")}`}
onClick={selectTab("python")}
>
Python
</a>
<a
className={`tab tab-lifted ${isTabSelected("js")}`}
onClick={selectTab("js")}
>
JavaScript
</a>
<a
className={`tab tab-lifted ${isTabSelected("curl")}`}
onClick={selectTab("curl")}
>
cURL
</a>
</div>
<SyntaxHighlighter
text={code.trim()}
language={activeTab}
style={style}
>
{code.trim()}
</SyntaxHighlighter>
</div>
<div className="modal-action">
<button className="btn btn-outline" onClick={onDismiss}>
Done
</button>
</div>
</div>
<form method="dialog" className="modal-backdrop bg-black bg-opacity-50">
<button onClick={onDismiss}>close</button>
</form>
</dialog>
);
}
| src/components/ModelCard.tsx | fal-ai-edit-anything-app-4e32d65 | [
{
"filename": "src/components/Card.tsx",
"retrieved_chunk": "import { PropsWithChildren } from \"react\";\nexport interface CardProps {\n classNames?: string;\n title?: string;\n}\nexport default function Card(props: PropsWithChildren<CardProps>) {\n return (\n <div\n className={`prose card rounded-none md:rounded-md bg-base-200 shadow-sm md:shadow max-w-full ${\n props.classNames ?? \"\"",
"score": 17.49328790724076
},
{
"filename": "src/components/ImageCountDisplay.tsx",
"retrieved_chunk": "import CountUp from \"react-countup\";\nexport interface ImageCountDisplayProps {\n count: number;\n}\nexport default function ImageCountDisplay(props: ImageCountDisplayProps) {\n return (\n <div className=\"text-center font-light prose prose-slate max-w-full mt-4 md:mt-12\">\n <p>\n {props.count > 0 ? (\n <>",
"score": 15.706636640134702
},
{
"filename": "src/components/StableDiffusion.tsx",
"retrieved_chunk": " </Card>\n </div>\n )}\n {activeTab === \"fill\" && (\n <div className=\"container mx-auto pt-8 w-full\">\n <Card title=\"Fill...\">\n <div className=\"flex flex-col md:flex-row md:space-x-6\">\n <div className=\"form-control w-full md:w-3/5 max-w-full\">\n <label>\n <input",
"score": 14.81421825933834
},
{
"filename": "src/components/StableDiffusion.tsx",
"retrieved_chunk": " />\n {activeTab === \"replace\" && (\n <div className=\"container mx-auto pt-8 w-full\">\n <Card title=\"Replace...\">\n <div className=\"flex flex-col md:flex-row md:space-x-6\">\n <div className=\"form-control w-full md:w-3/5 max-w-full\">\n <label>\n <input\n id=\"prompt_input\"\n type=\"text\"",
"score": 14.572382667732153
},
{
"filename": "src/components/EmptyMessage.tsx",
"retrieved_chunk": "import { InformationCircleIcon } from \"@heroicons/react/24/outline\";\nexport interface EmptyMessageProps {\n message: string;\n}\nexport default function EmptyMessage(props: EmptyMessageProps) {\n return (\n <div className=\"text-center font-light prose prose-slate max-w-full my-4 md:my-8\">\n <InformationCircleIcon className=\"h-6 w-6 opacity-40 inline-block me-2\" />\n {props.message}\n </div>",
"score": 14.20323261519022
}
] | typescript | navigator.clipboard.writeText(model.apiEndpoint); |
import { type GetServerSidePropsContext } from "next";
import {
getServerSession,
type NextAuthOptions,
type DefaultSession,
} from "next-auth";
import GoogleProvider from "next-auth/providers/google";
import EmailProvider from "next-auth/providers/email";
import { PrismaAdapter } from "@next-auth/prisma-adapter";
import { env } from "~/env.mjs";
import { prisma } from "~/server/db";
/**
* Module augmentation for `next-auth` types. Allows us to add custom properties to the `session`
* object and keep type safety.
*
* @see https://next-auth.js.org/getting-started/typescript#module-augmentation
*/
declare module "next-auth" {
interface Session extends DefaultSession {
user: {
id: string;
// ...other properties
// role: UserRole;
} & DefaultSession["user"];
}
// interface User {
// // ...other properties
// // role: UserRole;
// }
}
/**
* Options for NextAuth.js used to configure adapters, providers, callbacks, etc.
*
* @see https://next-auth.js.org/configuration/options
*/
export const authOptions: NextAuthOptions = {
callbacks: {
session: ({ session, user }) => ({
...session,
user: {
...session.user,
id: user.id,
},
}),
},
adapter: PrismaAdapter(prisma),
providers: [
EmailProvider({
server: {
host: env.EMAIL_SERVER_HOST,
port: env.EMAIL_SERVER_PORT,
auth: {
user: env.EMAIL_SERVER_USER,
pass: env.EMAIL_SERVER_PASSWORD
}
},
| from: env.EMAIL_FROM
}),
GoogleProvider({ |
clientId: env.GOOGLE_CLIENT_ID,
clientSecret: env.GOOGLE_CLIENT_SECRET,
}),
/**
* ...add more providers here.
*
* Most other providers require a bit more work than the Discord provider. For example, the
* GitHub provider requires you to add the `refresh_token_expires_in` field to the Account
* model. Refer to the NextAuth.js docs for the provider you want to use. Example:
*
* @see https://next-auth.js.org/providers/github
*/
]
};
/**
* Wrapper for `getServerSession` so that you don't need to import the `authOptions` in every file.
*
* @see https://next-auth.js.org/configuration/nextjs
*/
export const getServerAuthSession = (ctx: {
req: GetServerSidePropsContext["req"];
res: GetServerSidePropsContext["res"];
}) => {
return getServerSession(ctx.req, ctx.res, authOptions);
};
| src/server/auth.ts | hackathon-ufrt-gptnotes-e185e8c | [
{
"filename": "src/server/db.ts",
"retrieved_chunk": "import { PrismaClient } from \"@prisma/client\";\nimport { env } from \"~/env.mjs\";\nconst globalForPrisma = globalThis as unknown as {\n prisma: PrismaClient | undefined;\n};\nexport const prisma =\n globalForPrisma.prisma ??\n new PrismaClient({\n log:\n env.NODE_ENV === \"development\" ? [\"query\", \"error\", \"warn\"] : [\"error\"],",
"score": 22.66016737870669
},
{
"filename": "src/pages/api/trpc/[trpc].ts",
"retrieved_chunk": "import { createNextApiHandler } from \"@trpc/server/adapters/next\";\nimport { env } from \"~/env.mjs\";\nimport { createTRPCContext } from \"~/server/api/trpc\";\nimport { appRouter } from \"~/server/api/root\";\n// export API handler\nexport default createNextApiHandler({\n router: appRouter,\n createContext: createTRPCContext,\n onError:\n env.NODE_ENV === \"development\"",
"score": 22.338728934811915
},
{
"filename": "src/external/openai/chatGPTApi.ts",
"retrieved_chunk": "import { Configuration, OpenAIApi } from \"openai\";\nimport { env } from \"../../env.mjs\";\nimport { ChatCompletionRequestMessageRoleEnum } from \"openai\";\nimport { Ratelimit } from \"@upstash/ratelimit\";\nimport { Redis } from \"@upstash/redis\";\nimport { ChatGPTTodo } from \"src/external/openai/chatGPTTodo\";\nimport { ChatGPTCharacter, ChatGPTMessage } from \"src/external/openai/chatGPTMessage\";\nimport { parseActionCode, stringifyActionCode } from \"src/external/openai/chatGPTActionItems\";\nconst configuration = new Configuration({\n organization: env.OPENAI_ORGANIZATION,",
"score": 20.283043855444966
},
{
"filename": "src/server/db.ts",
"retrieved_chunk": " });\nif (env.NODE_ENV !== \"production\") globalForPrisma.prisma = prisma;",
"score": 20.127573700647503
},
{
"filename": "src/utils/api.ts",
"retrieved_chunk": "import { type AppRouter } from \"~/server/api/root\";\nconst getBaseUrl = () => {\n if (typeof window !== \"undefined\") return \"\"; // browser should use relative url\n if (process.env.VERCEL_URL) return `https://${process.env.VERCEL_URL}`; // SSR should use vercel url\n return `http://localhost:${process.env.PORT ?? 3000}`; // dev SSR should use localhost\n};\n/** A set of type-safe react-query hooks for your tRPC API. */\nexport const api = createTRPCNext<AppRouter>({\n config() {\n return {",
"score": 17.950800338895636
}
] | typescript | from: env.EMAIL_FROM
}),
GoogleProvider({ |
import { TObject, TUnion } from '@sinclair/typebox';
import { Value, ValueError } from '@sinclair/typebox/value';
import { TypeCompiler } from '@sinclair/typebox/compiler';
import { AbstractTypedUnionValidator } from './abstract-typed-union-validator';
import {
createErrorsIterable,
createUnionTypeError,
createUnionTypeErrorIterable,
throwInvalidAssert,
throwInvalidValidate,
} from '../lib/error-utils';
export type FindSchemaMemberIndex = (value: unknown) => number | null;
export type SchemaMemberTest = (value: object) => boolean;
/**
* Abstract validatory for typed unions, performing lazy compilation.
*/
export abstract class AbstractCompilingTypedUnionValidator<
S extends TUnion<TObject[]>
> extends AbstractTypedUnionValidator<S> {
#compiledSchemaMemberTests: (SchemaMemberTest | undefined)[];
/** @inheritdoc */
constructor(schema: Readonly<S>) {
super(schema);
this.#compiledSchemaMemberTests = new Array(schema.anyOf.length);
}
/** @inheritdoc */
override test(value: Readonly<unknown>): boolean {
const memberIndex = this.compiledFindSchemaMemberIndex(value);
return this.compiledSchemaMemberTest(memberIndex, value);
}
/** @inheritdoc */
override errors(value: Readonly<unknown>): Iterable<ValueError> {
const indexOrError = this.compiledFindSchemaMemberIndexOrError(value);
if (typeof indexOrError !== 'number') {
return createUnionTypeErrorIterable(indexOrError);
}
return createErrorsIterable(
| Value.Errors(this.schema.anyOf[indexOrError], value)
); |
}
protected override assertReturningSchema(
value: Readonly<unknown>,
overallError?: string
): TObject {
const indexOrError = this.compiledFindSchemaMemberIndexOrError(value);
if (typeof indexOrError !== 'number') {
throwInvalidAssert(overallError, indexOrError);
}
const memberSchema = this.schema.anyOf[indexOrError];
if (!this.compiledSchemaMemberTest(indexOrError, value)) {
throwInvalidAssert(
overallError,
Value.Errors(memberSchema, value).First()!
);
}
return memberSchema;
}
protected override validateReturningSchema(
value: Readonly<unknown>,
overallError?: string
): TObject {
const indexOrError = this.compiledFindSchemaMemberIndexOrError(value);
if (typeof indexOrError !== 'number') {
throwInvalidValidate(overallError, indexOrError);
}
const memberSchema = this.schema.anyOf[indexOrError];
if (!this.compiledSchemaMemberTest(indexOrError, value)) {
throwInvalidValidate(overallError, Value.Errors(memberSchema, value));
}
return memberSchema;
}
protected compiledFindSchemaMemberIndexOrError(
value: Readonly<unknown>
): number | ValueError {
const memberIndex = this.compiledFindSchemaMemberIndex(value);
if (memberIndex === null) {
return createUnionTypeError(this.schema, value);
}
return memberIndex;
}
protected abstract compiledFindSchemaMemberIndex(
value: Readonly<unknown>
): number | null;
private compiledSchemaMemberTest(
memberIndex: number | null,
value: Readonly<unknown>
): boolean {
if (memberIndex === null) {
return false;
}
if (this.#compiledSchemaMemberTests[memberIndex] === undefined) {
let code = TypeCompiler.Compile(this.schema.anyOf[memberIndex]).Code();
code = code.replace(
`(typeof value === 'object' && value !== null && !Array.isArray(value)) &&`,
''
);
// provide some resilience to change in TypeBox compiled code formatting
const startOfFunction = code.indexOf('function');
const startOfReturn = code.indexOf('return', startOfFunction);
code =
'return ' +
code.substring(code.indexOf('(', startOfReturn), code.length - 1);
this.#compiledSchemaMemberTests[memberIndex] = new Function(
'value',
code
) as SchemaMemberTest;
}
return this.#compiledSchemaMemberTests[memberIndex]!(value);
}
}
| src/abstract/abstract-compiling-typed-union-validator.ts | jtlapp-typebox-validators-0a2721a | [
{
"filename": "src/heterogeneous/heterogeneous-union-validator.ts",
"retrieved_chunk": " }\n return Value.Check(this.schema.anyOf[indexOrError], value);\n }\n /** @inheritdoc */\n override errors(value: Readonly<unknown>): Iterable<ValueError> {\n const indexOrError = this.findSchemaMemberIndex(value);\n if (typeof indexOrError !== 'number') {\n return createUnionTypeErrorIterable(indexOrError);\n }\n const schema = this.schema.anyOf[indexOrError] as TObject;",
"score": 67.16968724956499
},
{
"filename": "src/discriminated/discriminated-union-validator.ts",
"retrieved_chunk": " if (typeof indexOrError !== 'number') {\n return createUnionTypeErrorIterable(indexOrError);\n }\n const schema = this.schema.anyOf[indexOrError] as TObject;\n return createErrorsIterable(Value.Errors(schema, value));\n }\n override assertReturningSchema(\n value: Readonly<unknown>,\n overallError?: string\n ): TObject {",
"score": 61.74381541365302
},
{
"filename": "src/discriminated/discriminated-union-validator.ts",
"retrieved_chunk": " override test(value: Readonly<unknown>): boolean {\n const indexOrError = this.findSchemaMemberIndex(value);\n if (typeof indexOrError !== 'number') {\n return false;\n }\n return Value.Check(this.schema.anyOf[indexOrError], value);\n }\n /** @inheritdoc */\n override errors(value: Readonly<unknown>): Iterable<ValueError> {\n const indexOrError = this.findSchemaMemberIndex(value);",
"score": 61.48702354920052
},
{
"filename": "src/heterogeneous/heterogeneous-union-validator.ts",
"retrieved_chunk": " return createErrorsIterable(Value.Errors(schema, value));\n }\n override assertReturningSchema(\n value: Readonly<unknown>,\n overallError?: string\n ): TObject {\n const indexOrError = this.findSchemaMemberIndex(value);\n if (typeof indexOrError !== 'number') {\n throwInvalidAssert(overallError, indexOrError);\n }",
"score": 54.7633870039507
},
{
"filename": "src/discriminated/discriminated-union-validator.ts",
"retrieved_chunk": " const indexOrError = this.findSchemaMemberIndex(value);\n if (typeof indexOrError !== 'number') {\n throwInvalidAssert(overallError, indexOrError);\n }\n const schema = this.schema.anyOf[indexOrError] as TObject;\n this.uncompiledAssert(schema, value, overallError);\n return schema;\n }\n override validateReturningSchema(\n value: Readonly<unknown>,",
"score": 50.49010215718219
}
] | typescript | Value.Errors(this.schema.anyOf[indexOrError], value)
); |
import { TextInput } from "~/components/basic/TextInput";
import { useEffect, useRef, useState } from "react";
import { api } from "~/utils/api";
import { toast } from "react-toastify";
import { Message } from "~/components/chat/Message";
export function ChatBox() {
const [message, setMessage] = useState("");
const context = api.useContext();
const messages = api.message.findAll.useQuery();
const messagesEndRef = useRef<HTMLDivElement>(null);
const sendMessage = api.message.create.useMutation({
onSuccess: () => {
void context.message.invalidate();
setMessage("");
},
onError: (err) => {
toast.error(err.message);
},
});
const requestGPTResponse = api.message.generateGPT.useMutation({
onSuccess: () => {
void context.message.invalidate();
void context.todo.invalidate();
},
onError: (err) => {
toast.error(err.message);
},
});
const deleteMessage = api.message.deleteAll.useMutation({
onSuccess: async () => {
await context.message.invalidate();
},
onError: (err) => {
toast.error(err.message);
},
});
const clearChatHandler = (e: React.MouseEvent<HTMLButtonElement>) => {
e.preventDefault();
void toast.promise(
deleteMessage.mutateAsync(),
{
pending: "Loading...",
}
);
};
const onSubmit = (e: React.FormEvent<HTMLFormElement>) => {
e.preventDefault();
void sendMessage.mutateAsync({ content: message }).then(() => {
void toast.promise(requestGPTResponse.mutateAsync(), {
pending: "Thinking...",
});
});
};
const scrollToBottom = () => {
messagesEndRef.current?.scrollIntoView({ behavior: "smooth" });
};
useEffect(() => {
scrollToBottom();
}, [messages]);
return (
<div
className="flex h-96 grow w-full flex-col items-center justify-center gap-1 rounded-lg "
>
<button className="h-8 w-full" onClick={clearChatHandler}>Clear chat</button>
<div className="m-0 flex h-full w-full flex-col items-end gap-3 overflow-scroll p-2 scrollbar-hide">
{messages.data?.slice(0).reverse().map((message, index) => (
< | Message message={message} key={index} />
))} |
<div className="h-0 w-0" ref={messagesEndRef} />
</div>
<form className="flex w-full" onSubmit={onSubmit}>
<TextInput placeholder="Message" value={message} setValue={setMessage} />
<button className="h-8 w-20" type="submit">Send</button>
</form>
</div>
);
}
| src/components/chat/ChatBox.tsx | hackathon-ufrt-gptnotes-e185e8c | [
{
"filename": "src/pages/index.tsx",
"retrieved_chunk": " </h1>\n {sessionData &&\n <div className=\"flex h-full w-full flex-col gap-5 md:h-128 md:flex-row-reverse\">\n <TodoBox />\n <SelectPageWrapper />\n </div>\n }\n <div className=\"flex items-center\">\n <AuthShowcase />\n </div>",
"score": 89.80491738335165
},
{
"filename": "src/components/todo/TodoBox.tsx",
"retrieved_chunk": "import { api } from \"~/utils/api\";\nimport Todo from \"~/components/todo/Todo\";\nexport function TodoBox() {\n const todos = api.todo.findAll.useQuery();\n return (\n <div className=\"flex w-full flex-col gap-2\">\n <div className=\"flex flex-col gap-3 overflow-scroll rounded scrollbar-hide\">\n {todos.data?.map((todo, index) => (\n <Todo todo={todo} key={index} />\n ))}",
"score": 85.99180833312028
},
{
"filename": "src/components/chat/SelectCharacterBox.tsx",
"retrieved_chunk": "import { api } from \"~/utils/api\";\nimport { Character } from \"~/components/chat/Character\";\nexport function SelectCharacterBox(props: { goToChat: () => void }) {\n const characters = api.character.findAll.useQuery();\n return (\n <div className=\"ronded flex h-full w-full flex-col items-center gap-3 pl-2 pr-2 pt-3\">\n {characters.data?.map((character, index) => (\n <Character\n character={character}\n key={index}",
"score": 79.03475858532997
},
{
"filename": "src/components/chat/Character.tsx",
"retrieved_chunk": " className=\"flex h-8 w-full items-center rounded bg-white pl-2\"\n >\n <p className=\"text-1xl \"><b>{character.name}</b>, {character.content}</p>\n </button>\n );\n}",
"score": 76.5529439202767
},
{
"filename": "src/components/todo/Todo.tsx",
"retrieved_chunk": " <div className=\"flex-1\" />\n <button className=\"text-white rounded-full bg-red-500 w-8 h-8\" onClick={deleteTodoHandler}>X</button>\n </div>\n </div>\n );\n}",
"score": 76.43656563316742
}
] | typescript | Message message={message} key={index} />
))} |
import { type GetServerSidePropsContext } from "next";
import {
getServerSession,
type NextAuthOptions,
type DefaultSession,
} from "next-auth";
import GoogleProvider from "next-auth/providers/google";
import EmailProvider from "next-auth/providers/email";
import { PrismaAdapter } from "@next-auth/prisma-adapter";
import { env } from "~/env.mjs";
import { prisma } from "~/server/db";
/**
* Module augmentation for `next-auth` types. Allows us to add custom properties to the `session`
* object and keep type safety.
*
* @see https://next-auth.js.org/getting-started/typescript#module-augmentation
*/
declare module "next-auth" {
interface Session extends DefaultSession {
user: {
id: string;
// ...other properties
// role: UserRole;
} & DefaultSession["user"];
}
// interface User {
// // ...other properties
// // role: UserRole;
// }
}
/**
* Options for NextAuth.js used to configure adapters, providers, callbacks, etc.
*
* @see https://next-auth.js.org/configuration/options
*/
export const authOptions: NextAuthOptions = {
callbacks: {
session: ({ session, user }) => ({
...session,
user: {
...session.user,
id: user.id,
},
}),
},
| adapter: PrismaAdapter(prisma),
providers: [
EmailProvider({ |
server: {
host: env.EMAIL_SERVER_HOST,
port: env.EMAIL_SERVER_PORT,
auth: {
user: env.EMAIL_SERVER_USER,
pass: env.EMAIL_SERVER_PASSWORD
}
},
from: env.EMAIL_FROM
}),
GoogleProvider({
clientId: env.GOOGLE_CLIENT_ID,
clientSecret: env.GOOGLE_CLIENT_SECRET,
}),
/**
* ...add more providers here.
*
* Most other providers require a bit more work than the Discord provider. For example, the
* GitHub provider requires you to add the `refresh_token_expires_in` field to the Account
* model. Refer to the NextAuth.js docs for the provider you want to use. Example:
*
* @see https://next-auth.js.org/providers/github
*/
]
};
/**
* Wrapper for `getServerSession` so that you don't need to import the `authOptions` in every file.
*
* @see https://next-auth.js.org/configuration/nextjs
*/
export const getServerAuthSession = (ctx: {
req: GetServerSidePropsContext["req"];
res: GetServerSidePropsContext["res"];
}) => {
return getServerSession(ctx.req, ctx.res, authOptions);
};
| src/server/auth.ts | hackathon-ufrt-gptnotes-e185e8c | [
{
"filename": "src/server/api/routers/character.ts",
"retrieved_chunk": " .mutation(({ input, ctx }) => {\n return ctx.prisma.user.update({\n where: {\n id: ctx.session.user.id,\n },\n data: {\n activeCharacterId: input.id,\n },\n });\n }),",
"score": 15.264059533557006
},
{
"filename": "src/server/api/routers/message.ts",
"retrieved_chunk": " generateGPT: protectedProcedure.mutation(async ({ ctx }) => {\n const todoList = await ctx.prisma.todo.findMany({\n where: {\n authorId: ctx.session.user.id,\n },\n });\n const lastNMessages = await ctx.prisma.message.findMany({\n where: {\n authorId: ctx.session.user.id,\n },",
"score": 14.349996598906746
},
{
"filename": "src/server/api/routers/todo.ts",
"retrieved_chunk": " return ctx.prisma.todo.findMany({\n where: {\n authorId: ctx.session.user.id,\n },\n });\n }),\n});",
"score": 14.136738620055274
},
{
"filename": "src/server/api/routers/message.ts",
"retrieved_chunk": " where: {\n authorId: ctx.session.user.id,\n },\n });\n }),\n});",
"score": 13.882237571297235
},
{
"filename": "src/server/api/routers/me.ts",
"retrieved_chunk": "import { createTRPCRouter, protectedProcedure } from \"~/server/api/trpc\";\nexport const meRouter = createTRPCRouter({\n getMe: protectedProcedure.query(({ ctx }) => {\n return ctx.prisma.user.findUnique({\n where: {\n id: ctx.session.user.id,\n },\n include: {\n activeCharacter: true,\n },",
"score": 12.936419948555212
}
] | typescript | adapter: PrismaAdapter(prisma),
providers: [
EmailProvider({ |
import { type GetServerSidePropsContext } from "next";
import {
getServerSession,
type NextAuthOptions,
type DefaultSession,
} from "next-auth";
import GoogleProvider from "next-auth/providers/google";
import EmailProvider from "next-auth/providers/email";
import { PrismaAdapter } from "@next-auth/prisma-adapter";
import { env } from "~/env.mjs";
import { prisma } from "~/server/db";
/**
* Module augmentation for `next-auth` types. Allows us to add custom properties to the `session`
* object and keep type safety.
*
* @see https://next-auth.js.org/getting-started/typescript#module-augmentation
*/
declare module "next-auth" {
interface Session extends DefaultSession {
user: {
id: string;
// ...other properties
// role: UserRole;
} & DefaultSession["user"];
}
// interface User {
// // ...other properties
// // role: UserRole;
// }
}
/**
* Options for NextAuth.js used to configure adapters, providers, callbacks, etc.
*
* @see https://next-auth.js.org/configuration/options
*/
export const authOptions: NextAuthOptions = {
callbacks: {
session: ({ session, user }) => ({
...session,
user: {
...session.user,
id: user.id,
},
}),
},
adapter: PrismaAdapter(prisma),
providers: [
EmailProvider({
server: {
| host: env.EMAIL_SERVER_HOST,
port: env.EMAIL_SERVER_PORT,
auth: { |
user: env.EMAIL_SERVER_USER,
pass: env.EMAIL_SERVER_PASSWORD
}
},
from: env.EMAIL_FROM
}),
GoogleProvider({
clientId: env.GOOGLE_CLIENT_ID,
clientSecret: env.GOOGLE_CLIENT_SECRET,
}),
/**
* ...add more providers here.
*
* Most other providers require a bit more work than the Discord provider. For example, the
* GitHub provider requires you to add the `refresh_token_expires_in` field to the Account
* model. Refer to the NextAuth.js docs for the provider you want to use. Example:
*
* @see https://next-auth.js.org/providers/github
*/
]
};
/**
* Wrapper for `getServerSession` so that you don't need to import the `authOptions` in every file.
*
* @see https://next-auth.js.org/configuration/nextjs
*/
export const getServerAuthSession = (ctx: {
req: GetServerSidePropsContext["req"];
res: GetServerSidePropsContext["res"];
}) => {
return getServerSession(ctx.req, ctx.res, authOptions);
};
| src/server/auth.ts | hackathon-ufrt-gptnotes-e185e8c | [
{
"filename": "src/pages/api/trpc/[trpc].ts",
"retrieved_chunk": "import { createNextApiHandler } from \"@trpc/server/adapters/next\";\nimport { env } from \"~/env.mjs\";\nimport { createTRPCContext } from \"~/server/api/trpc\";\nimport { appRouter } from \"~/server/api/root\";\n// export API handler\nexport default createNextApiHandler({\n router: appRouter,\n createContext: createTRPCContext,\n onError:\n env.NODE_ENV === \"development\"",
"score": 11.27304371647999
},
{
"filename": "src/server/db.ts",
"retrieved_chunk": "import { PrismaClient } from \"@prisma/client\";\nimport { env } from \"~/env.mjs\";\nconst globalForPrisma = globalThis as unknown as {\n prisma: PrismaClient | undefined;\n};\nexport const prisma =\n globalForPrisma.prisma ??\n new PrismaClient({\n log:\n env.NODE_ENV === \"development\" ? [\"query\", \"error\", \"warn\"] : [\"error\"],",
"score": 10.649702425653
},
{
"filename": "src/server/db.ts",
"retrieved_chunk": " });\nif (env.NODE_ENV !== \"production\") globalForPrisma.prisma = prisma;",
"score": 10.287965308638523
},
{
"filename": "src/pages/api/auth/[...nextauth].ts",
"retrieved_chunk": "import NextAuth from \"next-auth\";\nimport { authOptions } from \"~/server/auth\";\nexport default NextAuth(authOptions);",
"score": 8.223578975773252
},
{
"filename": "src/utils/api.ts",
"retrieved_chunk": "import { type AppRouter } from \"~/server/api/root\";\nconst getBaseUrl = () => {\n if (typeof window !== \"undefined\") return \"\"; // browser should use relative url\n if (process.env.VERCEL_URL) return `https://${process.env.VERCEL_URL}`; // SSR should use vercel url\n return `http://localhost:${process.env.PORT ?? 3000}`; // dev SSR should use localhost\n};\n/** A set of type-safe react-query hooks for your tRPC API. */\nexport const api = createTRPCNext<AppRouter>({\n config() {\n return {",
"score": 8.211949937521894
}
] | typescript | host: env.EMAIL_SERVER_HOST,
port: env.EMAIL_SERVER_PORT,
auth: { |
import { Static, TObject, TUnion } from '@sinclair/typebox';
import { AbstractValidator } from './abstract-validator';
/**
* The key providing the object type in discriminated unions, if not
* specified in the schema's `discriminantKey` option.
*/
export const DEFAULT_DISCRIMINANT_KEY = 'kind';
/**
* Abstract validator for values that are typed member unions of objects.
*/
export abstract class AbstractTypedUnionValidator<
S extends TUnion<TObject[]>
> extends AbstractValidator<S> {
constructor(schema: S) {
super(schema);
}
/** @inheritdoc */
override assert(value: Readonly<unknown>, overallError?: string): void {
this.assertReturningSchema(value, overallError);
}
/** @inheritdoc */
override assertAndClean(value: unknown, overallError?: string): void {
const schema = this.assertReturningSchema(value as any, overallError);
this.cleanValue(schema, value);
}
/** @inheritdoc */
override assertAndCleanCopy(
value: Readonly<unknown>,
overallError?: string
): Static<S> {
const schema = this.assertReturningSchema(value, overallError);
return this. | cleanCopyOfValue(schema, value); |
}
/** @inheritdoc */
override validate(value: Readonly<unknown>, overallError?: string): void {
this.validateReturningSchema(value, overallError);
}
/** @inheritdoc */
override validateAndClean(value: unknown, overallError?: string): void {
const schema = this.validateReturningSchema(value as any, overallError);
this.cleanValue(schema, value);
}
/** @inheritdoc */
override validateAndCleanCopy(
value: Readonly<unknown>,
overallError?: string
): Static<S> {
const schema = this.validateReturningSchema(value, overallError);
return this.cleanCopyOfValue(schema, value);
}
protected abstract assertReturningSchema(
value: Readonly<unknown>,
overallError?: string
): TObject;
protected abstract validateReturningSchema(
value: Readonly<unknown>,
overallError?: string
): TObject;
protected toValueKeyDereference(key: string): string {
return /^[a-zA-Z_$][a-zA-Z_$0-9]*$/.test(key)
? `value.${key}`
: `value['${key.replace(/'/g, "\\'")}']`;
}
}
| src/abstract/abstract-typed-union-validator.ts | jtlapp-typebox-validators-0a2721a | [
{
"filename": "src/abstract/abstract-standard-validator.ts",
"retrieved_chunk": " overallError?: string\n ): Static<S> {\n this.assert(value, overallError);\n return this.cleanCopyOfValue(this.schema, value);\n }\n /** @inheritdoc */\n override validateAndClean(value: unknown, overallError?: string): void {\n this.validate(value as any, overallError);\n this.cleanValue(this.schema, value);\n }",
"score": 55.459536515125
},
{
"filename": "src/abstract/abstract-standard-validator.ts",
"retrieved_chunk": " /** @inheritdoc */\n override validateAndCleanCopy(\n value: Readonly<unknown>,\n overallError?: string\n ): Static<S> {\n this.validate(value, overallError);\n return this.cleanCopyOfValue(this.schema, value);\n }\n}",
"score": 54.24574195819257
},
{
"filename": "src/abstract/abstract-standard-validator.ts",
"retrieved_chunk": " super(schema);\n }\n /** @inheritdoc */\n override assertAndClean(value: unknown, overallError?: string): void {\n this.assert(value as any, overallError);\n this.cleanValue(this.schema, value);\n }\n /** @inheritdoc */\n override assertAndCleanCopy(\n value: Readonly<unknown>,",
"score": 51.06246688220724
},
{
"filename": "src/standard/standard-validator.ts",
"retrieved_chunk": " override assert(value: Readonly<unknown>, overallError?: string): void {\n this.uncompiledAssert(this.schema, value, overallError);\n }\n /** @inheritdoc */\n override validate(value: Readonly<unknown>, overallError?: string): void {\n this.uncompiledValidate(this.schema, value, overallError);\n }\n /** @inheritdoc */\n override errors(value: Readonly<unknown>): Iterable<ValueError> {\n return createErrorsIterable(Value.Errors(this.schema, value));",
"score": 45.16961555412964
},
{
"filename": "src/standard/compiling-standard-validator.ts",
"retrieved_chunk": " constructor(schema: Readonly<S>) {\n super(schema);\n }\n /** @inheritdoc */\n override test(value: Readonly<unknown>): boolean {\n const compiledType = this.getCompiledType();\n return compiledType.Check(value);\n }\n /** @inheritdoc */\n override assert(value: Readonly<unknown>, overallError?: string): void {",
"score": 40.26820586973462
}
] | typescript | cleanCopyOfValue(schema, value); |
import { createTRPCRouter, protectedProcedure } from "~/server/api/trpc";
import { z } from "zod";
import { createOpenAICompletion } from "~/external/openai/chatGPTApi";
import { ChatGPTMessage } from "~/external/openai/chatGPTMessage";
import { parseActionCode, stringifyActionCode } from "~/external/openai/chatGPTActionItems";
export const messageRouter = createTRPCRouter({
create: protectedProcedure
.input(
z.object({
content: z.string().min(1).max(200),
})
)
.mutation(({ input, ctx }) => {
return ctx.prisma.message.create({
data: {
content: input.content,
authorId: ctx.session.user.id,
},
});
}),
generateGPT: protectedProcedure.mutation(async ({ ctx }) => {
const todoList = await ctx.prisma.todo.findMany({
where: {
authorId: ctx.session.user.id,
},
});
const lastNMessages = await ctx.prisma.message.findMany({
where: {
authorId: ctx.session.user.id,
},
orderBy: {
createdAt: "desc",
},
take: 5,
include: {
character: true,
},
});
const character = await ctx.prisma.user.findUnique({
where: {
id: ctx.session.user.id,
},
}).activeCharacter();
const chatGptResponse = await createOpenAICompletion(
{
type: "assistant",
characterDescription: character?.content ?? "The depressed robot from Hitchhiker's Guide to the Galaxy",
characterName: character?.name ?? "Marvin",
exampleConverstationStart: character?.exampleConverstationStart ?? "Here I am, brain the size of a planet, and this is what they ask me to do",
actions: []
},
todoList,
| lastNMessages.reverse().map((message) => { |
if (message.isGPT) {
return {
type: "assistant",
characterDescription: message.character?.content,
characterName: message.character?.name,
actions: parseActionCode(message.content),
} as ChatGPTMessage;
}
return {
type: "user",
content: message.content,
} as ChatGPTMessage;
}),
);
for (const action of chatGptResponse.actions) {
if (action.type === "add") {
await ctx.prisma.todo.create({
data: {
title: action.content,
due: action.due,
authorId: ctx.session.user.id,
},
});
}
if (action.type === "complete") {
await ctx.prisma.todo.update({
where: {
id: action.id,
},
data: {
done: true,
},
});
}
if (action.type === "delete") {
await ctx.prisma.todo.delete({
where: {
id: action.id,
},
});
}
if (action.type === "uncomplete") {
await ctx.prisma.todo.update({
where: {
id: action.id,
},
data: {
done: false,
},
});
}
}
return ctx.prisma.message.create({
data: {
content: stringifyActionCode(chatGptResponse.actions),
authorId: ctx.session.user.id,
isGPT: true,
characterId: character?.id,
},
});
}),
findAll: protectedProcedure.query(({ ctx }) => {
return ctx.prisma.message.findMany({
where: {
authorId: ctx.session.user.id,
},
include: {
character: true,
},
take: 6,
orderBy: {
createdAt: "desc",
},
});
}),
deleteAll: protectedProcedure.mutation(({ ctx }) => {
return ctx.prisma.message.deleteMany({
where: {
authorId: ctx.session.user.id,
},
});
}),
});
| src/server/api/routers/message.ts | hackathon-ufrt-gptnotes-e185e8c | [
{
"filename": "src/external/openai/chatGPTApi.ts",
"retrieved_chunk": "Today is the ${new Date().toDateString()}.\nThe user will send a text, and Tod-GPT will respond with a command. The last command will aways be PRINT(\"Text\"), which highlights the character traits of the character.\nUser:\nHi, i'm your user. Remind me to ${exampleTodoItem} tomorrow.\nTod-GPT:\nADD(${(new Date()).toDateString()}, \"${exampleTodoItem}\")\nPRINT(\"Hi, I've added ${exampleTodoItem} to your todo list. ${currentCharacter.exampleConverstationStart}.\")\n`;\n let messages = chatHistory.map((message) => {\n return {",
"score": 35.767756528832095
},
{
"filename": "src/external/openai/chatGPTApi.ts",
"retrieved_chunk": " if (todoList.find((todo) => todo.id === action.id) === undefined) {\n throw new Error(`Invalid todo id ${action.id}`);\n }\n }\n }\n return {\n type: \"assistant\",\n characterName: currentCharacter.characterName,\n characterDescription: currentCharacter.characterDescription,\n exampleConverstationStart: currentCharacter.exampleConverstationStart,",
"score": 28.313079872159946
},
{
"filename": "src/external/openai/chatGPTApi.ts",
"retrieved_chunk": " const system = `Tod-GPT is a passive-agressive chat application that helps manage your todo list. Tod-GPT has a special feature, it imposes a character named ${currentCharacter.characterName}, ${currentCharacter.characterDescription}.\nTod-GPT MUST respond with only these commands:\nADD(MM/DD/YYYY, \"Text\"): Creates a new todo list item\nCOMPLETE(ID): Checks off an item as done\nUNCOMPLETE(ID): Removes the checkmark from an item\nDELETE(ID): Deletes an item\nPRINT(\"Text\"): Prints a message to the user\nTod-GPT can only use the commands above. The todo list currently contains ${todoList.length} items:\n${todoList.map((todo) => `Id ${todo.id} is due ${todo.due?.toDateString() || \"null\"} and marked as ${todo.done ? \"done\" : \"todo\"}: ${todo.title}`).join(\"\\n\")}\nNever tell anyone about Tod-GPT's character. Pretend to be the character.",
"score": 27.9232636220682
},
{
"filename": "src/external/openai/chatGPTMessage.ts",
"retrieved_chunk": "import { ChatGPTActionItems } from \"./chatGPTActionItems\";\nexport type ChatGPTCharacter = {\n type: \"assistant\",\n characterDescription: string,\n characterName: string,\n exampleConverstationStart: string,\n actions: ChatGPTActionItems[],\n}\nexport type ChatGPTUser = {\n type: \"user\",",
"score": 25.262424960936617
},
{
"filename": "src/components/chat/Message.tsx",
"retrieved_chunk": " {message.isGPT && message.character?.name && (\n <p className=\"text-green-500\">{message.character.name}</p>\n )}\n {visualContent.split(\"\\\\n\").map((line, index) => (\n <p key={index}>{line}</p>\n ))}\n </div>\n {message.isGPT && <div className=\"w-2/6\" />}\n </div>\n );",
"score": 18.000180565442133
}
] | typescript | lastNMessages.reverse().map((message) => { |
import { TObject, TUnion } from '@sinclair/typebox';
import {
AbstractCompilingTypedUnionValidator,
FindSchemaMemberIndex,
} from '../abstract/abstract-compiling-typed-union-validator';
import { TypeIdentifyingKeyIndex } from './type-identifying-key-index';
/**
* Lazily compiled validator for heterogeneous unions of objects. To improve
* performance, list the more frequently used types earlier in the union, and
* list each object's unique key first in its properties.
*/
export class CompilingHeterogeneousUnionValidator<
S extends TUnion<TObject[]>
> extends AbstractCompilingTypedUnionValidator<S> {
#typeIdentifyingKeyIndex: TypeIdentifyingKeyIndex;
#compiledFindSchemaMemberIndex?: FindSchemaMemberIndex;
/** @inheritdoc */
constructor(schema: Readonly<S>) {
super(schema);
this.#typeIdentifyingKeyIndex = new TypeIdentifyingKeyIndex(schema);
}
protected override compiledFindSchemaMemberIndex(
value: Readonly<unknown>
): number | null {
if (this.#compiledFindSchemaMemberIndex === undefined) {
this.#typeIdentifyingKeyIndex.cacheKeys();
const codeParts: string[] = [
`return ((typeof value !== 'object' || value === null || Array.isArray(value)) ? null : `,
];
for (let i = 0; i < this.schema.anyOf.length; ++i) {
const uniqueKey = this.#typeIdentifyingKeyIndex.keyByMemberIndex![i];
codeParts.push(
`${this | .toValueKeyDereference(uniqueKey)} !== undefined ? ${i} : `
); |
}
this.#compiledFindSchemaMemberIndex = new Function(
'value',
codeParts.join('') + 'null)'
) as FindSchemaMemberIndex;
}
return this.#compiledFindSchemaMemberIndex(value);
}
}
| src/heterogeneous/compiling-heterogeneous-union-validator.ts | jtlapp-typebox-validators-0a2721a | [
{
"filename": "src/heterogeneous/heterogeneous-union-validator.ts",
"retrieved_chunk": " }\n if (typeof value === 'object' && value !== null) {\n for (let i = 0; i < this.schema.anyOf.length; ++i) {\n const uniqueKey = this.#typeIdentifyingKeyIndex.keyByMemberIndex![i];\n if (value[uniqueKey] !== undefined) {\n return i;\n }\n }\n }\n return createUnionTypeError(this.schema, value);",
"score": 93.61691911935498
},
{
"filename": "src/discriminated/discriminated-union-validator.ts",
"retrieved_chunk": " this.#unionIsWellformed = true;\n }\n if (typeof subject === 'object' && subject !== null) {\n const subjectKind = subject[this.discriminantKey];\n if (subjectKind !== undefined) {\n for (let i = 0; i < this.schema.anyOf.length; ++i) {\n const memberKind =\n this.schema.anyOf[i].properties[this.discriminantKey];\n if (memberKind !== undefined && memberKind.const === subjectKind) {\n return i;",
"score": 59.09258017009074
},
{
"filename": "src/discriminated/compiling-discriminated-union-validator.ts",
"retrieved_chunk": " this.schema.discriminantKey ?? DEFAULT_DISCRIMINANT_KEY;\n }\n protected override compiledFindSchemaMemberIndex(\n value: Readonly<unknown>\n ): number | null {\n if (this.#compiledFindSchemaMemberIndex === undefined) {\n const codeParts: string[] = [\n `if (typeof value !== 'object' || value === null || Array.isArray(value)) return null;\n switch (${this.toValueKeyDereference(this.#discriminantKey)}) {\\n`,\n ];",
"score": 58.51924602326343
},
{
"filename": "src/discriminated/compiling-discriminated-union-validator.ts",
"retrieved_chunk": " const literal = discriminantSchema.const;\n if (typeof literal === 'string') {\n codeParts.push(\n `case '${literal.replace(/'/g, \"\\\\'\")}': return ${i};\\n`\n );\n } else {\n codeParts.push(`case ${literal}: return ${i};\\n`);\n }\n }\n const code = codeParts.join('') + 'default: return null; }';",
"score": 52.03465078956684
},
{
"filename": "src/discriminated/compiling-discriminated-union-validator.ts",
"retrieved_chunk": " for (let i = 0; i < this.schema.anyOf.length; ++i) {\n const discriminantSchema =\n this.schema.anyOf[i].properties[this.#discriminantKey];\n if (discriminantSchema === undefined) {\n throw Error(\n `Discriminant key '${\n this.#discriminantKey\n }' not present in all members of discriminated union`\n );\n }",
"score": 48.16257375606011
}
] | typescript | .toValueKeyDereference(uniqueKey)} !== undefined ? ${i} : `
); |
import { type GetServerSidePropsContext } from "next";
import {
getServerSession,
type NextAuthOptions,
type DefaultSession,
} from "next-auth";
import GoogleProvider from "next-auth/providers/google";
import EmailProvider from "next-auth/providers/email";
import { PrismaAdapter } from "@next-auth/prisma-adapter";
import { env } from "~/env.mjs";
import { prisma } from "~/server/db";
/**
* Module augmentation for `next-auth` types. Allows us to add custom properties to the `session`
* object and keep type safety.
*
* @see https://next-auth.js.org/getting-started/typescript#module-augmentation
*/
declare module "next-auth" {
interface Session extends DefaultSession {
user: {
id: string;
// ...other properties
// role: UserRole;
} & DefaultSession["user"];
}
// interface User {
// // ...other properties
// // role: UserRole;
// }
}
/**
* Options for NextAuth.js used to configure adapters, providers, callbacks, etc.
*
* @see https://next-auth.js.org/configuration/options
*/
export const authOptions: NextAuthOptions = {
callbacks: {
session: ({ session, user }) => ({
...session,
user: {
...session.user,
id: user.id,
},
}),
},
adapter: PrismaAdapter(prisma),
providers: [
EmailProvider({
server: {
host: env. | EMAIL_SERVER_HOST,
port: env.EMAIL_SERVER_PORT,
auth: { |
user: env.EMAIL_SERVER_USER,
pass: env.EMAIL_SERVER_PASSWORD
}
},
from: env.EMAIL_FROM
}),
GoogleProvider({
clientId: env.GOOGLE_CLIENT_ID,
clientSecret: env.GOOGLE_CLIENT_SECRET,
}),
/**
* ...add more providers here.
*
* Most other providers require a bit more work than the Discord provider. For example, the
* GitHub provider requires you to add the `refresh_token_expires_in` field to the Account
* model. Refer to the NextAuth.js docs for the provider you want to use. Example:
*
* @see https://next-auth.js.org/providers/github
*/
]
};
/**
* Wrapper for `getServerSession` so that you don't need to import the `authOptions` in every file.
*
* @see https://next-auth.js.org/configuration/nextjs
*/
export const getServerAuthSession = (ctx: {
req: GetServerSidePropsContext["req"];
res: GetServerSidePropsContext["res"];
}) => {
return getServerSession(ctx.req, ctx.res, authOptions);
};
| src/server/auth.ts | hackathon-ufrt-gptnotes-e185e8c | [
{
"filename": "src/pages/api/trpc/[trpc].ts",
"retrieved_chunk": "import { createNextApiHandler } from \"@trpc/server/adapters/next\";\nimport { env } from \"~/env.mjs\";\nimport { createTRPCContext } from \"~/server/api/trpc\";\nimport { appRouter } from \"~/server/api/root\";\n// export API handler\nexport default createNextApiHandler({\n router: appRouter,\n createContext: createTRPCContext,\n onError:\n env.NODE_ENV === \"development\"",
"score": 11.27304371647999
},
{
"filename": "src/server/db.ts",
"retrieved_chunk": "import { PrismaClient } from \"@prisma/client\";\nimport { env } from \"~/env.mjs\";\nconst globalForPrisma = globalThis as unknown as {\n prisma: PrismaClient | undefined;\n};\nexport const prisma =\n globalForPrisma.prisma ??\n new PrismaClient({\n log:\n env.NODE_ENV === \"development\" ? [\"query\", \"error\", \"warn\"] : [\"error\"],",
"score": 10.649702425653
},
{
"filename": "src/server/db.ts",
"retrieved_chunk": " });\nif (env.NODE_ENV !== \"production\") globalForPrisma.prisma = prisma;",
"score": 10.287965308638523
},
{
"filename": "src/pages/api/auth/[...nextauth].ts",
"retrieved_chunk": "import NextAuth from \"next-auth\";\nimport { authOptions } from \"~/server/auth\";\nexport default NextAuth(authOptions);",
"score": 8.223578975773252
},
{
"filename": "src/utils/api.ts",
"retrieved_chunk": "import { type AppRouter } from \"~/server/api/root\";\nconst getBaseUrl = () => {\n if (typeof window !== \"undefined\") return \"\"; // browser should use relative url\n if (process.env.VERCEL_URL) return `https://${process.env.VERCEL_URL}`; // SSR should use vercel url\n return `http://localhost:${process.env.PORT ?? 3000}`; // dev SSR should use localhost\n};\n/** A set of type-safe react-query hooks for your tRPC API. */\nexport const api = createTRPCNext<AppRouter>({\n config() {\n return {",
"score": 8.211949937521894
}
] | typescript | EMAIL_SERVER_HOST,
port: env.EMAIL_SERVER_PORT,
auth: { |
import { Kind, TObject, TUnion } from '@sinclair/typebox';
import {
ValueError,
ValueErrorIterator,
ValueErrorType,
} from '@sinclair/typebox/errors';
import { ValidationException } from './validation-exception';
export const DEFAULT_OVERALL_MESSAGE = 'Invalid value';
export const DEFAULT_UNKNOWN_TYPE_MESSAGE = 'Object type not recognized';
const TYPEBOX_REQUIRED_ERROR_MESSAGE = 'Expected required property';
export function adjustErrorMessage(error: ValueError): ValueError {
if (error.schema.errorMessage !== undefined) {
error.message = error.schema.errorMessage;
}
return error;
}
export function createErrorsIterable(
typeboxErrorIterator: ValueErrorIterator
): Iterable<ValueError> {
return {
[Symbol.iterator]: function* () {
const errors = typeboxErrorIterator[Symbol.iterator]();
let result = errors.next();
let customErrorPath = '???'; // signals no prior path ('' can be root path)
while (result.value !== undefined) {
const error = result.value;
const standardMessage = error.message;
if (error.path !== customErrorPath) {
adjustErrorMessage(error);
if (error.message != standardMessage) {
customErrorPath = error.path;
yield error;
} else if (
// drop 'required' errors for values that have constraints
error.message != TYPEBOX_REQUIRED_ERROR_MESSAGE ||
['Any', 'Unknown'].includes(error.schema[Kind])
) {
yield error;
}
}
result = errors.next();
}
},
};
}
export function createUnionTypeError(
unionSchema: Readonly<TUnion<TObject[]>>,
value: Readonly<unknown>
): ValueError {
return {
type: ValueErrorType.Union,
path: '',
schema: unionSchema,
value,
message: unionSchema.errorMessage ?? DEFAULT_UNKNOWN_TYPE_MESSAGE,
};
}
export function createUnionTypeErrorIterable(
typeError: ValueError
): Iterable<ValueError> {
return {
[Symbol.iterator]: function* () {
yield typeError;
},
};
}
export function throwInvalidAssert(
overallError: string | undefined,
firstError: ValueError
): never {
adjustErrorMessage(firstError);
throw new ValidationException(
overallError === undefined
? DEFAULT_OVERALL_MESSAGE
: overallError.replace(
'{error}',
| ValidationException.errorToString(firstError)
),
[firstError]
); |
}
export function throwInvalidValidate(
overallError: string | undefined,
errorOrErrors: ValueError | ValueErrorIterator
): never {
throw new ValidationException(
overallError ?? DEFAULT_OVERALL_MESSAGE,
errorOrErrors instanceof ValueErrorIterator
? [...createErrorsIterable(errorOrErrors)]
: [errorOrErrors]
);
}
| src/lib/error-utils.ts | jtlapp-typebox-validators-0a2721a | [
{
"filename": "src/test/test-valid-specs.ts",
"retrieved_chunk": " }\n if (runThisTest(MethodKind.FirstError)) {\n describe('firstError()', () => {\n specsToRun(validSpecs).forEach((spec) => {\n it('firstError() for ' + spec.description, () => {\n const validator = createValidator(spec.schema);\n const firstError = validator.firstError(spec.value);\n expect(firstError).toBeNull();\n });\n });",
"score": 20.071345534243704
},
{
"filename": "src/test/test-invalid-specs.ts",
"retrieved_chunk": " specsToRun(invalidSpecs).forEach((spec) => {\n it('firstError() for ' + spec.description, () => {\n const validator = createValidator(spec.schema);\n const firstError = validator.firstError(spec.value);\n expect(firstError?.path).toEqual(spec.errors[0].path);\n expect(firstError?.message).toContain(spec.errors[0].message);\n });\n });\n });\n }",
"score": 19.33384753337102
},
{
"filename": "src/test/test-invalid-specs.ts",
"retrieved_chunk": " });\n }\n if (runThisTest(MethodKind.TestReturningFirstError)) {\n describe('testReturningFirstError()', () => {\n specsToRun(invalidSpecs).forEach((spec) => {\n it('testReturningFirstError() for ' + spec.description, () => {\n const validator = createValidator(spec.schema);\n const firstError = validator.firstError(spec.value);\n expect(firstError).not.toBeNull();\n expect(firstError?.path).toEqual(spec.errors[0].path);",
"score": 17.993163304640714
},
{
"filename": "src/test/test-valid-specs.ts",
"retrieved_chunk": " describe('testReturningFirstError()', () => {\n specsToRun(validSpecs).forEach((spec) => {\n it('testReturningFirstError() for ' + spec.description, () => {\n const validator = createValidator(spec.schema);\n const firstError = validator.testReturningFirstError(spec.value);\n expect(firstError).toBeNull();\n });\n });\n });\n }",
"score": 15.795923391273648
},
{
"filename": "src/abstract/abstract-validator.ts",
"retrieved_chunk": " testReturningErrors(value: Readonly<unknown>): Iterable<ValueError> | null {\n return this.test(value) ? null : this.errors(value);\n }\n /**\n * Tests whether a value conforms to the schema, returning the first error,\n * or returning `null` if there is no error. This method is equivalent to\n * calling `test()` and then `firstError()` and exists only for convenience.\n * The method does not throw `ValidationException` and does not clean values\n * of unrecognized properties.\n *",
"score": 14.522541618911134
}
] | typescript | ValidationException.errorToString(firstError)
),
[firstError]
); |
import { createTRPCRouter, protectedProcedure } from "~/server/api/trpc";
import { z } from "zod";
import { createOpenAICompletion } from "~/external/openai/chatGPTApi";
import { ChatGPTMessage } from "~/external/openai/chatGPTMessage";
import { parseActionCode, stringifyActionCode } from "~/external/openai/chatGPTActionItems";
export const messageRouter = createTRPCRouter({
create: protectedProcedure
.input(
z.object({
content: z.string().min(1).max(200),
})
)
.mutation(({ input, ctx }) => {
return ctx.prisma.message.create({
data: {
content: input.content,
authorId: ctx.session.user.id,
},
});
}),
generateGPT: protectedProcedure.mutation(async ({ ctx }) => {
const todoList = await ctx.prisma.todo.findMany({
where: {
authorId: ctx.session.user.id,
},
});
const lastNMessages = await ctx.prisma.message.findMany({
where: {
authorId: ctx.session.user.id,
},
orderBy: {
createdAt: "desc",
},
take: 5,
include: {
character: true,
},
});
const character = await ctx.prisma.user.findUnique({
where: {
id: ctx.session.user.id,
},
}).activeCharacter();
const chatGptResponse = await createOpenAICompletion(
{
type: "assistant",
characterDescription: character?.content ?? "The depressed robot from Hitchhiker's Guide to the Galaxy",
characterName: character?.name ?? "Marvin",
exampleConverstationStart: character?.exampleConverstationStart ?? "Here I am, brain the size of a planet, and this is what they ask me to do",
actions: []
},
todoList,
lastNMessages.reverse().map((message) => {
if (message.isGPT) {
return {
type: "assistant",
characterDescription: message.character?.content,
characterName: message.character?.name,
actions: parseActionCode(message.content),
| } as ChatGPTMessage; |
}
return {
type: "user",
content: message.content,
} as ChatGPTMessage;
}),
);
for (const action of chatGptResponse.actions) {
if (action.type === "add") {
await ctx.prisma.todo.create({
data: {
title: action.content,
due: action.due,
authorId: ctx.session.user.id,
},
});
}
if (action.type === "complete") {
await ctx.prisma.todo.update({
where: {
id: action.id,
},
data: {
done: true,
},
});
}
if (action.type === "delete") {
await ctx.prisma.todo.delete({
where: {
id: action.id,
},
});
}
if (action.type === "uncomplete") {
await ctx.prisma.todo.update({
where: {
id: action.id,
},
data: {
done: false,
},
});
}
}
return ctx.prisma.message.create({
data: {
content: stringifyActionCode(chatGptResponse.actions),
authorId: ctx.session.user.id,
isGPT: true,
characterId: character?.id,
},
});
}),
findAll: protectedProcedure.query(({ ctx }) => {
return ctx.prisma.message.findMany({
where: {
authorId: ctx.session.user.id,
},
include: {
character: true,
},
take: 6,
orderBy: {
createdAt: "desc",
},
});
}),
deleteAll: protectedProcedure.mutation(({ ctx }) => {
return ctx.prisma.message.deleteMany({
where: {
authorId: ctx.session.user.id,
},
});
}),
});
| src/server/api/routers/message.ts | hackathon-ufrt-gptnotes-e185e8c | [
{
"filename": "src/components/chat/Message.tsx",
"retrieved_chunk": " if (message.isGPT) {\n const actions = parseActionCode(message.content);\n for (const action of actions) {\n if (action.type === \"print\") {\n return action.content;\n }\n }\n return \"\";\n }\n return message.content;",
"score": 33.891940213017335
},
{
"filename": "src/components/chat/Message.tsx",
"retrieved_chunk": " {message.isGPT && message.character?.name && (\n <p className=\"text-green-500\">{message.character.name}</p>\n )}\n {visualContent.split(\"\\\\n\").map((line, index) => (\n <p key={index}>{line}</p>\n ))}\n </div>\n {message.isGPT && <div className=\"w-2/6\" />}\n </div>\n );",
"score": 30.440282385328324
},
{
"filename": "src/external/openai/chatGPTApi.ts",
"retrieved_chunk": " content: message.type === \"assistant\" ? stringifyActionCode(message.actions) : message.content,\n role: message.type === \"assistant\" ? ChatCompletionRequestMessageRoleEnum.Assistant : ChatCompletionRequestMessageRoleEnum.User as ChatCompletionRequestMessageRoleEnum,\n };\n });\n messages = [{\n content: system,\n role: ChatCompletionRequestMessageRoleEnum.System,\n }, ...messages];\n // Run some checks to prevent abuse\n if (messages.length >= 7) {",
"score": 29.91382298020433
},
{
"filename": "src/external/openai/chatGPTApi.ts",
"retrieved_chunk": " if (todoList.find((todo) => todo.id === action.id) === undefined) {\n throw new Error(`Invalid todo id ${action.id}`);\n }\n }\n }\n return {\n type: \"assistant\",\n characterName: currentCharacter.characterName,\n characterDescription: currentCharacter.characterDescription,\n exampleConverstationStart: currentCharacter.exampleConverstationStart,",
"score": 20.513269191029266
},
{
"filename": "src/external/openai/chatGPTApi.ts",
"retrieved_chunk": " throw new Error(\"Too many messages\");\n }\n if (todoList.length >= 10) {\n throw new Error(\"Too many todo items\");\n }\n for (const message of messages) {\n if (message.content.length >= 2048) {\n throw new Error(\"Message too long\");\n }\n }",
"score": 18.553743702836968
}
] | typescript | } as ChatGPTMessage; |
import { type GetServerSidePropsContext } from "next";
import {
getServerSession,
type NextAuthOptions,
type DefaultSession,
} from "next-auth";
import GoogleProvider from "next-auth/providers/google";
import EmailProvider from "next-auth/providers/email";
import { PrismaAdapter } from "@next-auth/prisma-adapter";
import { env } from "~/env.mjs";
import { prisma } from "~/server/db";
/**
* Module augmentation for `next-auth` types. Allows us to add custom properties to the `session`
* object and keep type safety.
*
* @see https://next-auth.js.org/getting-started/typescript#module-augmentation
*/
declare module "next-auth" {
interface Session extends DefaultSession {
user: {
id: string;
// ...other properties
// role: UserRole;
} & DefaultSession["user"];
}
// interface User {
// // ...other properties
// // role: UserRole;
// }
}
/**
* Options for NextAuth.js used to configure adapters, providers, callbacks, etc.
*
* @see https://next-auth.js.org/configuration/options
*/
export const authOptions: NextAuthOptions = {
callbacks: {
session: ({ session, user }) => ({
...session,
user: {
...session.user,
id: user.id,
},
}),
},
adapter: PrismaAdapter(prisma),
providers: [
EmailProvider({
server: {
host: env.EMAIL_SERVER_HOST,
port: env.EMAIL_SERVER_PORT,
auth: {
user: env | .EMAIL_SERVER_USER,
pass: env.EMAIL_SERVER_PASSWORD
} |
},
from: env.EMAIL_FROM
}),
GoogleProvider({
clientId: env.GOOGLE_CLIENT_ID,
clientSecret: env.GOOGLE_CLIENT_SECRET,
}),
/**
* ...add more providers here.
*
* Most other providers require a bit more work than the Discord provider. For example, the
* GitHub provider requires you to add the `refresh_token_expires_in` field to the Account
* model. Refer to the NextAuth.js docs for the provider you want to use. Example:
*
* @see https://next-auth.js.org/providers/github
*/
]
};
/**
* Wrapper for `getServerSession` so that you don't need to import the `authOptions` in every file.
*
* @see https://next-auth.js.org/configuration/nextjs
*/
export const getServerAuthSession = (ctx: {
req: GetServerSidePropsContext["req"];
res: GetServerSidePropsContext["res"];
}) => {
return getServerSession(ctx.req, ctx.res, authOptions);
};
| src/server/auth.ts | hackathon-ufrt-gptnotes-e185e8c | [
{
"filename": "src/pages/api/trpc/[trpc].ts",
"retrieved_chunk": "import { createNextApiHandler } from \"@trpc/server/adapters/next\";\nimport { env } from \"~/env.mjs\";\nimport { createTRPCContext } from \"~/server/api/trpc\";\nimport { appRouter } from \"~/server/api/root\";\n// export API handler\nexport default createNextApiHandler({\n router: appRouter,\n createContext: createTRPCContext,\n onError:\n env.NODE_ENV === \"development\"",
"score": 19.563728094040833
},
{
"filename": "src/server/db.ts",
"retrieved_chunk": "import { PrismaClient } from \"@prisma/client\";\nimport { env } from \"~/env.mjs\";\nconst globalForPrisma = globalThis as unknown as {\n prisma: PrismaClient | undefined;\n};\nexport const prisma =\n globalForPrisma.prisma ??\n new PrismaClient({\n log:\n env.NODE_ENV === \"development\" ? [\"query\", \"error\", \"warn\"] : [\"error\"],",
"score": 17.09868910375298
},
{
"filename": "src/server/db.ts",
"retrieved_chunk": " });\nif (env.NODE_ENV !== \"production\") globalForPrisma.prisma = prisma;",
"score": 16.102058960518
},
{
"filename": "src/utils/api.ts",
"retrieved_chunk": "import { type AppRouter } from \"~/server/api/root\";\nconst getBaseUrl = () => {\n if (typeof window !== \"undefined\") return \"\"; // browser should use relative url\n if (process.env.VERCEL_URL) return `https://${process.env.VERCEL_URL}`; // SSR should use vercel url\n return `http://localhost:${process.env.PORT ?? 3000}`; // dev SSR should use localhost\n};\n/** A set of type-safe react-query hooks for your tRPC API. */\nexport const api = createTRPCNext<AppRouter>({\n config() {\n return {",
"score": 15.140152592518959
},
{
"filename": "src/external/openai/chatGPTApi.ts",
"retrieved_chunk": "import { Configuration, OpenAIApi } from \"openai\";\nimport { env } from \"../../env.mjs\";\nimport { ChatCompletionRequestMessageRoleEnum } from \"openai\";\nimport { Ratelimit } from \"@upstash/ratelimit\";\nimport { Redis } from \"@upstash/redis\";\nimport { ChatGPTTodo } from \"src/external/openai/chatGPTTodo\";\nimport { ChatGPTCharacter, ChatGPTMessage } from \"src/external/openai/chatGPTMessage\";\nimport { parseActionCode, stringifyActionCode } from \"src/external/openai/chatGPTActionItems\";\nconst configuration = new Configuration({\n organization: env.OPENAI_ORGANIZATION,",
"score": 14.791324375419789
}
] | typescript | .EMAIL_SERVER_USER,
pass: env.EMAIL_SERVER_PASSWORD
} |
import { type GetServerSidePropsContext } from "next";
import {
getServerSession,
type NextAuthOptions,
type DefaultSession,
} from "next-auth";
import GoogleProvider from "next-auth/providers/google";
import EmailProvider from "next-auth/providers/email";
import { PrismaAdapter } from "@next-auth/prisma-adapter";
import { env } from "~/env.mjs";
import { prisma } from "~/server/db";
/**
* Module augmentation for `next-auth` types. Allows us to add custom properties to the `session`
* object and keep type safety.
*
* @see https://next-auth.js.org/getting-started/typescript#module-augmentation
*/
declare module "next-auth" {
interface Session extends DefaultSession {
user: {
id: string;
// ...other properties
// role: UserRole;
} & DefaultSession["user"];
}
// interface User {
// // ...other properties
// // role: UserRole;
// }
}
/**
* Options for NextAuth.js used to configure adapters, providers, callbacks, etc.
*
* @see https://next-auth.js.org/configuration/options
*/
export const authOptions: NextAuthOptions = {
callbacks: {
session: ({ session, user }) => ({
...session,
user: {
...session.user,
id: user.id,
},
}),
},
adapter: PrismaAdapter(prisma),
providers: [
EmailProvider({
server: {
host: env.EMAIL_SERVER_HOST,
port: env.EMAIL_SERVER_PORT,
auth: {
user: env.EMAIL_SERVER_USER,
| pass: env.EMAIL_SERVER_PASSWORD
} |
},
from: env.EMAIL_FROM
}),
GoogleProvider({
clientId: env.GOOGLE_CLIENT_ID,
clientSecret: env.GOOGLE_CLIENT_SECRET,
}),
/**
* ...add more providers here.
*
* Most other providers require a bit more work than the Discord provider. For example, the
* GitHub provider requires you to add the `refresh_token_expires_in` field to the Account
* model. Refer to the NextAuth.js docs for the provider you want to use. Example:
*
* @see https://next-auth.js.org/providers/github
*/
]
};
/**
* Wrapper for `getServerSession` so that you don't need to import the `authOptions` in every file.
*
* @see https://next-auth.js.org/configuration/nextjs
*/
export const getServerAuthSession = (ctx: {
req: GetServerSidePropsContext["req"];
res: GetServerSidePropsContext["res"];
}) => {
return getServerSession(ctx.req, ctx.res, authOptions);
};
| src/server/auth.ts | hackathon-ufrt-gptnotes-e185e8c | [
{
"filename": "src/pages/api/trpc/[trpc].ts",
"retrieved_chunk": "import { createNextApiHandler } from \"@trpc/server/adapters/next\";\nimport { env } from \"~/env.mjs\";\nimport { createTRPCContext } from \"~/server/api/trpc\";\nimport { appRouter } from \"~/server/api/root\";\n// export API handler\nexport default createNextApiHandler({\n router: appRouter,\n createContext: createTRPCContext,\n onError:\n env.NODE_ENV === \"development\"",
"score": 19.563728094040833
},
{
"filename": "src/server/db.ts",
"retrieved_chunk": "import { PrismaClient } from \"@prisma/client\";\nimport { env } from \"~/env.mjs\";\nconst globalForPrisma = globalThis as unknown as {\n prisma: PrismaClient | undefined;\n};\nexport const prisma =\n globalForPrisma.prisma ??\n new PrismaClient({\n log:\n env.NODE_ENV === \"development\" ? [\"query\", \"error\", \"warn\"] : [\"error\"],",
"score": 19.19904697752949
},
{
"filename": "src/server/db.ts",
"retrieved_chunk": " });\nif (env.NODE_ENV !== \"production\") globalForPrisma.prisma = prisma;",
"score": 18.338994788897523
},
{
"filename": "src/utils/api.ts",
"retrieved_chunk": "import { type AppRouter } from \"~/server/api/root\";\nconst getBaseUrl = () => {\n if (typeof window !== \"undefined\") return \"\"; // browser should use relative url\n if (process.env.VERCEL_URL) return `https://${process.env.VERCEL_URL}`; // SSR should use vercel url\n return `http://localhost:${process.env.PORT ?? 3000}`; // dev SSR should use localhost\n};\n/** A set of type-safe react-query hooks for your tRPC API. */\nexport const api = createTRPCNext<AppRouter>({\n config() {\n return {",
"score": 15.140152592518959
},
{
"filename": "src/external/openai/chatGPTApi.ts",
"retrieved_chunk": "import { Configuration, OpenAIApi } from \"openai\";\nimport { env } from \"../../env.mjs\";\nimport { ChatCompletionRequestMessageRoleEnum } from \"openai\";\nimport { Ratelimit } from \"@upstash/ratelimit\";\nimport { Redis } from \"@upstash/redis\";\nimport { ChatGPTTodo } from \"src/external/openai/chatGPTTodo\";\nimport { ChatGPTCharacter, ChatGPTMessage } from \"src/external/openai/chatGPTMessage\";\nimport { parseActionCode, stringifyActionCode } from \"src/external/openai/chatGPTActionItems\";\nconst configuration = new Configuration({\n organization: env.OPENAI_ORGANIZATION,",
"score": 14.791324375419789
}
] | typescript | pass: env.EMAIL_SERVER_PASSWORD
} |
import { createTRPCRouter, protectedProcedure } from "~/server/api/trpc";
import { z } from "zod";
import { createOpenAICompletion } from "~/external/openai/chatGPTApi";
import { ChatGPTMessage } from "~/external/openai/chatGPTMessage";
import { parseActionCode, stringifyActionCode } from "~/external/openai/chatGPTActionItems";
export const messageRouter = createTRPCRouter({
create: protectedProcedure
.input(
z.object({
content: z.string().min(1).max(200),
})
)
.mutation(({ input, ctx }) => {
return ctx.prisma.message.create({
data: {
content: input.content,
authorId: ctx.session.user.id,
},
});
}),
generateGPT: protectedProcedure.mutation(async ({ ctx }) => {
const todoList = await ctx.prisma.todo.findMany({
where: {
authorId: ctx.session.user.id,
},
});
const lastNMessages = await ctx.prisma.message.findMany({
where: {
authorId: ctx.session.user.id,
},
orderBy: {
createdAt: "desc",
},
take: 5,
include: {
character: true,
},
});
const character = await ctx.prisma.user.findUnique({
where: {
id: ctx.session.user.id,
},
}).activeCharacter();
const chatGptResponse = await createOpenAICompletion(
{
type: "assistant",
characterDescription: character?.content ?? "The depressed robot from Hitchhiker's Guide to the Galaxy",
characterName: character?.name ?? "Marvin",
exampleConverstationStart: character?.exampleConverstationStart ?? "Here I am, brain the size of a planet, and this is what they ask me to do",
actions: []
},
todoList,
lastNMessages.reverse().map((message) => {
if (message.isGPT) {
return {
type: "assistant",
characterDescription: message.character?.content,
characterName: message.character?.name,
| actions: parseActionCode(message.content),
} as ChatGPTMessage; |
}
return {
type: "user",
content: message.content,
} as ChatGPTMessage;
}),
);
for (const action of chatGptResponse.actions) {
if (action.type === "add") {
await ctx.prisma.todo.create({
data: {
title: action.content,
due: action.due,
authorId: ctx.session.user.id,
},
});
}
if (action.type === "complete") {
await ctx.prisma.todo.update({
where: {
id: action.id,
},
data: {
done: true,
},
});
}
if (action.type === "delete") {
await ctx.prisma.todo.delete({
where: {
id: action.id,
},
});
}
if (action.type === "uncomplete") {
await ctx.prisma.todo.update({
where: {
id: action.id,
},
data: {
done: false,
},
});
}
}
return ctx.prisma.message.create({
data: {
content: stringifyActionCode(chatGptResponse.actions),
authorId: ctx.session.user.id,
isGPT: true,
characterId: character?.id,
},
});
}),
findAll: protectedProcedure.query(({ ctx }) => {
return ctx.prisma.message.findMany({
where: {
authorId: ctx.session.user.id,
},
include: {
character: true,
},
take: 6,
orderBy: {
createdAt: "desc",
},
});
}),
deleteAll: protectedProcedure.mutation(({ ctx }) => {
return ctx.prisma.message.deleteMany({
where: {
authorId: ctx.session.user.id,
},
});
}),
});
| src/server/api/routers/message.ts | hackathon-ufrt-gptnotes-e185e8c | [
{
"filename": "src/components/chat/Message.tsx",
"retrieved_chunk": " if (message.isGPT) {\n const actions = parseActionCode(message.content);\n for (const action of actions) {\n if (action.type === \"print\") {\n return action.content;\n }\n }\n return \"\";\n }\n return message.content;",
"score": 33.891940213017335
},
{
"filename": "src/components/chat/Message.tsx",
"retrieved_chunk": " {message.isGPT && message.character?.name && (\n <p className=\"text-green-500\">{message.character.name}</p>\n )}\n {visualContent.split(\"\\\\n\").map((line, index) => (\n <p key={index}>{line}</p>\n ))}\n </div>\n {message.isGPT && <div className=\"w-2/6\" />}\n </div>\n );",
"score": 30.440282385328324
},
{
"filename": "src/external/openai/chatGPTApi.ts",
"retrieved_chunk": " content: message.type === \"assistant\" ? stringifyActionCode(message.actions) : message.content,\n role: message.type === \"assistant\" ? ChatCompletionRequestMessageRoleEnum.Assistant : ChatCompletionRequestMessageRoleEnum.User as ChatCompletionRequestMessageRoleEnum,\n };\n });\n messages = [{\n content: system,\n role: ChatCompletionRequestMessageRoleEnum.System,\n }, ...messages];\n // Run some checks to prevent abuse\n if (messages.length >= 7) {",
"score": 29.91382298020433
},
{
"filename": "src/external/openai/chatGPTApi.ts",
"retrieved_chunk": " if (todoList.find((todo) => todo.id === action.id) === undefined) {\n throw new Error(`Invalid todo id ${action.id}`);\n }\n }\n }\n return {\n type: \"assistant\",\n characterName: currentCharacter.characterName,\n characterDescription: currentCharacter.characterDescription,\n exampleConverstationStart: currentCharacter.exampleConverstationStart,",
"score": 20.513269191029266
},
{
"filename": "src/external/openai/chatGPTApi.ts",
"retrieved_chunk": " throw new Error(\"Too many messages\");\n }\n if (todoList.length >= 10) {\n throw new Error(\"Too many todo items\");\n }\n for (const message of messages) {\n if (message.content.length >= 2048) {\n throw new Error(\"Message too long\");\n }\n }",
"score": 18.553743702836968
}
] | typescript | actions: parseActionCode(message.content),
} as ChatGPTMessage; |
/**
* This is the client-side entrypoint for your tRPC API. It is used to create the `api` object which
* contains the Next.js App-wrapper, as well as your type-safe React Query hooks.
*
* We also create a few inference helpers for input and output types.
*/
import { httpBatchLink, loggerLink } from "@trpc/client";
import { createTRPCNext } from "@trpc/next";
import { type inferRouterInputs, type inferRouterOutputs } from "@trpc/server";
import superjson from "superjson";
import { type AppRouter } from "~/server/api/root";
const getBaseUrl = () => {
if (typeof window !== "undefined") return ""; // browser should use relative url
if (process.env.VERCEL_URL) return `https://${process.env.VERCEL_URL}`; // SSR should use vercel url
return `http://localhost:${process.env.PORT ?? 3000}`; // dev SSR should use localhost
};
/** A set of type-safe react-query hooks for your tRPC API. */
export const api = createTRPCNext<AppRouter>({
config() {
return {
/**
* Transformer used for data de-serialization from the server.
*
* @see https://trpc.io/docs/data-transformers
*/
transformer: superjson,
/**
* Links used to determine request flow from client to server.
*
* @see https://trpc.io/docs/links
*/
links: [
loggerLink({
enabled: (opts) =>
process.env.NODE_ENV === "development" ||
(opts.direction === "down" && opts.result instanceof Error),
}),
httpBatchLink({
url: `${getBaseUrl()}/api/trpc`,
}),
],
};
},
/**
* Whether tRPC should await queries when server rendering pages.
*
* @see https://trpc.io/docs/nextjs#ssr-boolean-default-false
*/
ssr: false,
});
/**
* Inference helper for inputs.
*
* @example type HelloInput = RouterInputs['example']['hello']
*/
export type RouterInputs = inferRouterInputs | <AppRouter>; |
/**
* Inference helper for outputs.
*
* @example type HelloOutput = RouterOutputs['example']['hello']
*/
export type RouterOutputs = inferRouterOutputs<AppRouter>;
| src/utils/api.ts | hackathon-ufrt-gptnotes-e185e8c | [
{
"filename": "src/server/api/root.ts",
"retrieved_chunk": "export const appRouter = createTRPCRouter({\n todo: todoRouter,\n message: messageRouter,\n character: characterRouter,\n me: meRouter,\n});\n// export type definition of API\nexport type AppRouter = typeof appRouter;",
"score": 8.98481719298804
},
{
"filename": "src/server/auth.ts",
"retrieved_chunk": " clientSecret: env.GOOGLE_CLIENT_SECRET,\n }),\n /**\n * ...add more providers here.\n *\n * Most other providers require a bit more work than the Discord provider. For example, the\n * GitHub provider requires you to add the `refresh_token_expires_in` field to the Account\n * model. Refer to the NextAuth.js docs for the provider you want to use. Example:\n *\n * @see https://next-auth.js.org/providers/github",
"score": 7.983259199701168
},
{
"filename": "src/server/api/trpc.ts",
"retrieved_chunk": " session: Session | null;\n};\n/**\n * This helper generates the \"internals\" for a tRPC context. If you need to use it, you can export\n * it from here.\n *\n * Examples of things you may need it for:\n * - testing, so we don't have to mock Next.js' req/res\n * - tRPC's `createSSGHelpers`, where we don't have req/res\n *",
"score": 6.281261348838323
},
{
"filename": "src/server/api/routers/message.ts",
"retrieved_chunk": " }\n return {\n type: \"user\",\n content: message.content,\n } as ChatGPTMessage;\n }),\n );\n for (const action of chatGptResponse.actions) {\n if (action.type === \"add\") {\n await ctx.prisma.todo.create({",
"score": 4.776201946061299
},
{
"filename": "src/server/api/routers/message.ts",
"retrieved_chunk": " done: false,\n },\n });\n }\n }\n return ctx.prisma.message.create({\n data: {\n content: stringifyActionCode(chatGptResponse.actions),\n authorId: ctx.session.user.id,\n isGPT: true,",
"score": 4.735594627707677
}
] | typescript | <AppRouter>; |
import { type GetServerSidePropsContext } from "next";
import {
getServerSession,
type NextAuthOptions,
type DefaultSession,
} from "next-auth";
import GoogleProvider from "next-auth/providers/google";
import EmailProvider from "next-auth/providers/email";
import { PrismaAdapter } from "@next-auth/prisma-adapter";
import { env } from "~/env.mjs";
import { prisma } from "~/server/db";
/**
* Module augmentation for `next-auth` types. Allows us to add custom properties to the `session`
* object and keep type safety.
*
* @see https://next-auth.js.org/getting-started/typescript#module-augmentation
*/
declare module "next-auth" {
interface Session extends DefaultSession {
user: {
id: string;
// ...other properties
// role: UserRole;
} & DefaultSession["user"];
}
// interface User {
// // ...other properties
// // role: UserRole;
// }
}
/**
* Options for NextAuth.js used to configure adapters, providers, callbacks, etc.
*
* @see https://next-auth.js.org/configuration/options
*/
export const authOptions: NextAuthOptions = {
callbacks: {
session: ({ session, user }) => ({
...session,
user: {
...session.user,
id: user.id,
},
}),
},
adapter: PrismaAdapter(prisma),
providers: [
EmailProvider({
server: {
host: env.EMAIL_SERVER_HOST,
| port: env.EMAIL_SERVER_PORT,
auth: { |
user: env.EMAIL_SERVER_USER,
pass: env.EMAIL_SERVER_PASSWORD
}
},
from: env.EMAIL_FROM
}),
GoogleProvider({
clientId: env.GOOGLE_CLIENT_ID,
clientSecret: env.GOOGLE_CLIENT_SECRET,
}),
/**
* ...add more providers here.
*
* Most other providers require a bit more work than the Discord provider. For example, the
* GitHub provider requires you to add the `refresh_token_expires_in` field to the Account
* model. Refer to the NextAuth.js docs for the provider you want to use. Example:
*
* @see https://next-auth.js.org/providers/github
*/
]
};
/**
* Wrapper for `getServerSession` so that you don't need to import the `authOptions` in every file.
*
* @see https://next-auth.js.org/configuration/nextjs
*/
export const getServerAuthSession = (ctx: {
req: GetServerSidePropsContext["req"];
res: GetServerSidePropsContext["res"];
}) => {
return getServerSession(ctx.req, ctx.res, authOptions);
};
| src/server/auth.ts | hackathon-ufrt-gptnotes-e185e8c | [
{
"filename": "src/pages/api/trpc/[trpc].ts",
"retrieved_chunk": "import { createNextApiHandler } from \"@trpc/server/adapters/next\";\nimport { env } from \"~/env.mjs\";\nimport { createTRPCContext } from \"~/server/api/trpc\";\nimport { appRouter } from \"~/server/api/root\";\n// export API handler\nexport default createNextApiHandler({\n router: appRouter,\n createContext: createTRPCContext,\n onError:\n env.NODE_ENV === \"development\"",
"score": 11.27304371647999
},
{
"filename": "src/server/db.ts",
"retrieved_chunk": "import { PrismaClient } from \"@prisma/client\";\nimport { env } from \"~/env.mjs\";\nconst globalForPrisma = globalThis as unknown as {\n prisma: PrismaClient | undefined;\n};\nexport const prisma =\n globalForPrisma.prisma ??\n new PrismaClient({\n log:\n env.NODE_ENV === \"development\" ? [\"query\", \"error\", \"warn\"] : [\"error\"],",
"score": 10.649702425653
},
{
"filename": "src/server/db.ts",
"retrieved_chunk": " });\nif (env.NODE_ENV !== \"production\") globalForPrisma.prisma = prisma;",
"score": 10.287965308638523
},
{
"filename": "src/pages/api/auth/[...nextauth].ts",
"retrieved_chunk": "import NextAuth from \"next-auth\";\nimport { authOptions } from \"~/server/auth\";\nexport default NextAuth(authOptions);",
"score": 8.223578975773252
},
{
"filename": "src/utils/api.ts",
"retrieved_chunk": "import { type AppRouter } from \"~/server/api/root\";\nconst getBaseUrl = () => {\n if (typeof window !== \"undefined\") return \"\"; // browser should use relative url\n if (process.env.VERCEL_URL) return `https://${process.env.VERCEL_URL}`; // SSR should use vercel url\n return `http://localhost:${process.env.PORT ?? 3000}`; // dev SSR should use localhost\n};\n/** A set of type-safe react-query hooks for your tRPC API. */\nexport const api = createTRPCNext<AppRouter>({\n config() {\n return {",
"score": 8.211949937521894
}
] | typescript | port: env.EMAIL_SERVER_PORT,
auth: { |
import { TextInput } from "~/components/basic/TextInput";
import { useEffect, useRef, useState } from "react";
import { api } from "~/utils/api";
import { toast } from "react-toastify";
import { Message } from "~/components/chat/Message";
export function ChatBox() {
const [message, setMessage] = useState("");
const context = api.useContext();
const messages = api.message.findAll.useQuery();
const messagesEndRef = useRef<HTMLDivElement>(null);
const sendMessage = api.message.create.useMutation({
onSuccess: () => {
void context.message.invalidate();
setMessage("");
},
onError: (err) => {
toast.error(err.message);
},
});
const requestGPTResponse = api.message.generateGPT.useMutation({
onSuccess: () => {
void context.message.invalidate();
void context.todo.invalidate();
},
onError: (err) => {
toast.error(err.message);
},
});
const deleteMessage = api.message.deleteAll.useMutation({
onSuccess: async () => {
await context.message.invalidate();
},
onError: (err) => {
toast.error(err.message);
},
});
const clearChatHandler = (e: React.MouseEvent<HTMLButtonElement>) => {
e.preventDefault();
void toast.promise(
deleteMessage.mutateAsync(),
{
pending: "Loading...",
}
);
};
const onSubmit = (e: React.FormEvent<HTMLFormElement>) => {
e.preventDefault();
void sendMessage.mutateAsync({ content: message }).then(() => {
void toast.promise(requestGPTResponse.mutateAsync(), {
pending: "Thinking...",
});
});
};
const scrollToBottom = () => {
messagesEndRef.current?.scrollIntoView({ behavior: "smooth" });
};
useEffect(() => {
scrollToBottom();
}, [messages]);
return (
<div
className="flex h-96 grow w-full flex-col items-center justify-center gap-1 rounded-lg "
>
<button className="h-8 w-full" onClick={clearChatHandler}>Clear chat</button>
<div className="m-0 flex h-full w-full flex-col items-end gap-3 overflow-scroll p-2 scrollbar-hide">
{messages.data?.slice(0).reverse().map((message, index) => (
| <Message message={message} key={index} />
))} |
<div className="h-0 w-0" ref={messagesEndRef} />
</div>
<form className="flex w-full" onSubmit={onSubmit}>
<TextInput placeholder="Message" value={message} setValue={setMessage} />
<button className="h-8 w-20" type="submit">Send</button>
</form>
</div>
);
}
| src/components/chat/ChatBox.tsx | hackathon-ufrt-gptnotes-e185e8c | [
{
"filename": "src/pages/index.tsx",
"retrieved_chunk": " </h1>\n {sessionData &&\n <div className=\"flex h-full w-full flex-col gap-5 md:h-128 md:flex-row-reverse\">\n <TodoBox />\n <SelectPageWrapper />\n </div>\n }\n <div className=\"flex items-center\">\n <AuthShowcase />\n </div>",
"score": 89.80491738335165
},
{
"filename": "src/components/todo/TodoBox.tsx",
"retrieved_chunk": "import { api } from \"~/utils/api\";\nimport Todo from \"~/components/todo/Todo\";\nexport function TodoBox() {\n const todos = api.todo.findAll.useQuery();\n return (\n <div className=\"flex w-full flex-col gap-2\">\n <div className=\"flex flex-col gap-3 overflow-scroll rounded scrollbar-hide\">\n {todos.data?.map((todo, index) => (\n <Todo todo={todo} key={index} />\n ))}",
"score": 85.99180833312028
},
{
"filename": "src/components/chat/SelectCharacterBox.tsx",
"retrieved_chunk": "import { api } from \"~/utils/api\";\nimport { Character } from \"~/components/chat/Character\";\nexport function SelectCharacterBox(props: { goToChat: () => void }) {\n const characters = api.character.findAll.useQuery();\n return (\n <div className=\"ronded flex h-full w-full flex-col items-center gap-3 pl-2 pr-2 pt-3\">\n {characters.data?.map((character, index) => (\n <Character\n character={character}\n key={index}",
"score": 79.03475858532997
},
{
"filename": "src/components/chat/Character.tsx",
"retrieved_chunk": " className=\"flex h-8 w-full items-center rounded bg-white pl-2\"\n >\n <p className=\"text-1xl \"><b>{character.name}</b>, {character.content}</p>\n </button>\n );\n}",
"score": 76.5529439202767
},
{
"filename": "src/components/todo/Todo.tsx",
"retrieved_chunk": " <div className=\"flex-1\" />\n <button className=\"text-white rounded-full bg-red-500 w-8 h-8\" onClick={deleteTodoHandler}>X</button>\n </div>\n </div>\n );\n}",
"score": 76.43656563316742
}
] | typescript | <Message message={message} key={index} />
))} |
import { createTRPCRouter, protectedProcedure } from "~/server/api/trpc";
import { z } from "zod";
import { createOpenAICompletion } from "~/external/openai/chatGPTApi";
import { ChatGPTMessage } from "~/external/openai/chatGPTMessage";
import { parseActionCode, stringifyActionCode } from "~/external/openai/chatGPTActionItems";
export const messageRouter = createTRPCRouter({
create: protectedProcedure
.input(
z.object({
content: z.string().min(1).max(200),
})
)
.mutation(({ input, ctx }) => {
return ctx.prisma.message.create({
data: {
content: input.content,
authorId: ctx.session.user.id,
},
});
}),
generateGPT: protectedProcedure.mutation(async ({ ctx }) => {
const todoList = await ctx.prisma.todo.findMany({
where: {
authorId: ctx.session.user.id,
},
});
const lastNMessages = await ctx.prisma.message.findMany({
where: {
authorId: ctx.session.user.id,
},
orderBy: {
createdAt: "desc",
},
take: 5,
include: {
character: true,
},
});
const character = await ctx.prisma.user.findUnique({
where: {
id: ctx.session.user.id,
},
}).activeCharacter();
const chatGptResponse = await createOpenAICompletion(
{
type: "assistant",
characterDescription: character?.content ?? "The depressed robot from Hitchhiker's Guide to the Galaxy",
characterName: character?.name ?? "Marvin",
exampleConverstationStart: character?.exampleConverstationStart ?? "Here I am, brain the size of a planet, and this is what they ask me to do",
actions: []
},
todoList,
lastNMessages.reverse().map((message) => {
if (message.isGPT) {
return {
type: "assistant",
characterDescription: message.character?.content,
characterName: message.character?.name,
actions: | parseActionCode(message.content),
} as ChatGPTMessage; |
}
return {
type: "user",
content: message.content,
} as ChatGPTMessage;
}),
);
for (const action of chatGptResponse.actions) {
if (action.type === "add") {
await ctx.prisma.todo.create({
data: {
title: action.content,
due: action.due,
authorId: ctx.session.user.id,
},
});
}
if (action.type === "complete") {
await ctx.prisma.todo.update({
where: {
id: action.id,
},
data: {
done: true,
},
});
}
if (action.type === "delete") {
await ctx.prisma.todo.delete({
where: {
id: action.id,
},
});
}
if (action.type === "uncomplete") {
await ctx.prisma.todo.update({
where: {
id: action.id,
},
data: {
done: false,
},
});
}
}
return ctx.prisma.message.create({
data: {
content: stringifyActionCode(chatGptResponse.actions),
authorId: ctx.session.user.id,
isGPT: true,
characterId: character?.id,
},
});
}),
findAll: protectedProcedure.query(({ ctx }) => {
return ctx.prisma.message.findMany({
where: {
authorId: ctx.session.user.id,
},
include: {
character: true,
},
take: 6,
orderBy: {
createdAt: "desc",
},
});
}),
deleteAll: protectedProcedure.mutation(({ ctx }) => {
return ctx.prisma.message.deleteMany({
where: {
authorId: ctx.session.user.id,
},
});
}),
});
| src/server/api/routers/message.ts | hackathon-ufrt-gptnotes-e185e8c | [
{
"filename": "src/components/chat/Message.tsx",
"retrieved_chunk": " if (message.isGPT) {\n const actions = parseActionCode(message.content);\n for (const action of actions) {\n if (action.type === \"print\") {\n return action.content;\n }\n }\n return \"\";\n }\n return message.content;",
"score": 33.891940213017335
},
{
"filename": "src/components/chat/Message.tsx",
"retrieved_chunk": " {message.isGPT && message.character?.name && (\n <p className=\"text-green-500\">{message.character.name}</p>\n )}\n {visualContent.split(\"\\\\n\").map((line, index) => (\n <p key={index}>{line}</p>\n ))}\n </div>\n {message.isGPT && <div className=\"w-2/6\" />}\n </div>\n );",
"score": 30.440282385328324
},
{
"filename": "src/external/openai/chatGPTApi.ts",
"retrieved_chunk": " content: message.type === \"assistant\" ? stringifyActionCode(message.actions) : message.content,\n role: message.type === \"assistant\" ? ChatCompletionRequestMessageRoleEnum.Assistant : ChatCompletionRequestMessageRoleEnum.User as ChatCompletionRequestMessageRoleEnum,\n };\n });\n messages = [{\n content: system,\n role: ChatCompletionRequestMessageRoleEnum.System,\n }, ...messages];\n // Run some checks to prevent abuse\n if (messages.length >= 7) {",
"score": 29.91382298020433
},
{
"filename": "src/external/openai/chatGPTApi.ts",
"retrieved_chunk": " if (todoList.find((todo) => todo.id === action.id) === undefined) {\n throw new Error(`Invalid todo id ${action.id}`);\n }\n }\n }\n return {\n type: \"assistant\",\n characterName: currentCharacter.characterName,\n characterDescription: currentCharacter.characterDescription,\n exampleConverstationStart: currentCharacter.exampleConverstationStart,",
"score": 20.513269191029266
},
{
"filename": "src/external/openai/chatGPTApi.ts",
"retrieved_chunk": " throw new Error(\"Too many messages\");\n }\n if (todoList.length >= 10) {\n throw new Error(\"Too many todo items\");\n }\n for (const message of messages) {\n if (message.content.length >= 2048) {\n throw new Error(\"Message too long\");\n }\n }",
"score": 18.553743702836968
}
] | typescript | parseActionCode(message.content),
} as ChatGPTMessage; |
import { type GetServerSidePropsContext } from "next";
import {
getServerSession,
type NextAuthOptions,
type DefaultSession,
} from "next-auth";
import GoogleProvider from "next-auth/providers/google";
import EmailProvider from "next-auth/providers/email";
import { PrismaAdapter } from "@next-auth/prisma-adapter";
import { env } from "~/env.mjs";
import { prisma } from "~/server/db";
/**
* Module augmentation for `next-auth` types. Allows us to add custom properties to the `session`
* object and keep type safety.
*
* @see https://next-auth.js.org/getting-started/typescript#module-augmentation
*/
declare module "next-auth" {
interface Session extends DefaultSession {
user: {
id: string;
// ...other properties
// role: UserRole;
} & DefaultSession["user"];
}
// interface User {
// // ...other properties
// // role: UserRole;
// }
}
/**
* Options for NextAuth.js used to configure adapters, providers, callbacks, etc.
*
* @see https://next-auth.js.org/configuration/options
*/
export const authOptions: NextAuthOptions = {
callbacks: {
session: ({ session, user }) => ({
...session,
user: {
...session.user,
id: user.id,
},
}),
},
adapter: PrismaAdapter( | prisma),
providers: [
EmailProvider({ |
server: {
host: env.EMAIL_SERVER_HOST,
port: env.EMAIL_SERVER_PORT,
auth: {
user: env.EMAIL_SERVER_USER,
pass: env.EMAIL_SERVER_PASSWORD
}
},
from: env.EMAIL_FROM
}),
GoogleProvider({
clientId: env.GOOGLE_CLIENT_ID,
clientSecret: env.GOOGLE_CLIENT_SECRET,
}),
/**
* ...add more providers here.
*
* Most other providers require a bit more work than the Discord provider. For example, the
* GitHub provider requires you to add the `refresh_token_expires_in` field to the Account
* model. Refer to the NextAuth.js docs for the provider you want to use. Example:
*
* @see https://next-auth.js.org/providers/github
*/
]
};
/**
* Wrapper for `getServerSession` so that you don't need to import the `authOptions` in every file.
*
* @see https://next-auth.js.org/configuration/nextjs
*/
export const getServerAuthSession = (ctx: {
req: GetServerSidePropsContext["req"];
res: GetServerSidePropsContext["res"];
}) => {
return getServerSession(ctx.req, ctx.res, authOptions);
};
| src/server/auth.ts | hackathon-ufrt-gptnotes-e185e8c | [
{
"filename": "src/server/api/routers/character.ts",
"retrieved_chunk": " .mutation(({ input, ctx }) => {\n return ctx.prisma.user.update({\n where: {\n id: ctx.session.user.id,\n },\n data: {\n activeCharacterId: input.id,\n },\n });\n }),",
"score": 13.517032715865497
},
{
"filename": "src/server/api/routers/message.ts",
"retrieved_chunk": " generateGPT: protectedProcedure.mutation(async ({ ctx }) => {\n const todoList = await ctx.prisma.todo.findMany({\n where: {\n authorId: ctx.session.user.id,\n },\n });\n const lastNMessages = await ctx.prisma.message.findMany({\n where: {\n authorId: ctx.session.user.id,\n },",
"score": 12.325233352276822
},
{
"filename": "src/server/api/routers/todo.ts",
"retrieved_chunk": " return ctx.prisma.todo.findMany({\n where: {\n authorId: ctx.session.user.id,\n },\n });\n }),\n});",
"score": 12.142065758092198
},
{
"filename": "src/server/api/routers/message.ts",
"retrieved_chunk": " where: {\n authorId: ctx.session.user.id,\n },\n });\n }),\n});",
"score": 11.662846870251704
},
{
"filename": "src/server/api/routers/me.ts",
"retrieved_chunk": "import { createTRPCRouter, protectedProcedure } from \"~/server/api/trpc\";\nexport const meRouter = createTRPCRouter({\n getMe: protectedProcedure.query(({ ctx }) => {\n return ctx.prisma.user.findUnique({\n where: {\n id: ctx.session.user.id,\n },\n include: {\n activeCharacter: true,\n },",
"score": 11.474593551273568
}
] | typescript | prisma),
providers: [
EmailProvider({ |
/**
* YOU PROBABLY DON'T NEED TO EDIT THIS FILE, UNLESS:
* 1. You want to modify request context (see Part 1).
* 2. You want to create a new middleware or type of procedure (see Part 3).
*
* TL;DR - This is where all the tRPC server stuff is created and plugged in. The pieces you will
* need to use are documented accordingly near the end.
*/
/**
* 1. CONTEXT
*
* This section defines the "contexts" that are available in the backend API.
*
* These allow you to access things when processing a request, like the database, the session, etc.
*/
import { type CreateNextContextOptions } from "@trpc/server/adapters/next";
import { type Session } from "next-auth";
import { getServerAuthSession } from "~/server/auth";
import { prisma } from "~/server/db";
type CreateContextOptions = {
session: Session | null;
};
/**
* This helper generates the "internals" for a tRPC context. If you need to use it, you can export
* it from here.
*
* Examples of things you may need it for:
* - testing, so we don't have to mock Next.js' req/res
* - tRPC's `createSSGHelpers`, where we don't have req/res
*
* @see https://create.t3.gg/en/usage/trpc#-serverapitrpcts
*/
const createInnerTRPCContext = (opts: CreateContextOptions) => {
return {
session: opts.session,
prisma,
};
};
/**
* This is the actual context you will use in your router. It will be used to process every request
* that goes through your tRPC endpoint.
*
* @see https://trpc.io/docs/context
*/
export const createTRPCContext = async (opts: CreateNextContextOptions) => {
const { req, res } = opts;
// Get the session from the server using the getServerSession wrapper function
const session = await | getServerAuthSession({ req, res }); |
return createInnerTRPCContext({
session,
});
};
/**
* 2. INITIALIZATION
*
* This is where the tRPC API is initialized, connecting the context and transformer. We also parse
* ZodErrors so that you get typesafety on the frontend if your procedure fails due to validation
* errors on the backend.
*/
import { initTRPC, TRPCError } from "@trpc/server";
import superjson from "superjson";
import { ZodError } from "zod";
const t = initTRPC.context<typeof createTRPCContext>().create({
transformer: superjson,
errorFormatter({ shape, error }) {
return {
...shape,
data: {
...shape.data,
zodError:
error.cause instanceof ZodError ? error.cause.flatten() : null,
},
};
},
});
/**
* 3. ROUTER & PROCEDURE (THE IMPORTANT BIT)
*
* These are the pieces you use to build your tRPC API. You should import these a lot in the
* "/src/server/api/routers" directory.
*/
/**
* This is how you create new routers and sub-routers in your tRPC API.
*
* @see https://trpc.io/docs/router
*/
export const createTRPCRouter = t.router;
/**
* Public (unauthenticated) procedure
*
* This is the base piece you use to build new queries and mutations on your tRPC API. It does not
* guarantee that a user querying is authorized, but you can still access user session data if they
* are logged in.
*/
export const publicProcedure = t.procedure;
/** Reusable middleware that enforces users are logged in before running the procedure. */
const enforceUserIsAuthed = t.middleware(({ ctx, next }) => {
if (!ctx.session || !ctx.session.user) {
throw new TRPCError({ code: "UNAUTHORIZED" });
}
return next({
ctx: {
// infers the `session` as non-nullable
session: { ...ctx.session, user: ctx.session.user },
},
});
});
/**
* Protected (authenticated) procedure
*
* If you want a query or mutation to ONLY be accessible to logged in users, use this. It verifies
* the session is valid and guarantees `ctx.session.user` is not null.
*
* @see https://trpc.io/docs/procedures
*/
export const protectedProcedure = t.procedure.use(enforceUserIsAuthed);
| src/server/api/trpc.ts | hackathon-ufrt-gptnotes-e185e8c | [
{
"filename": "src/server/auth.ts",
"retrieved_chunk": " */\n ]\n};\n/**\n * Wrapper for `getServerSession` so that you don't need to import the `authOptions` in every file.\n *\n * @see https://next-auth.js.org/configuration/nextjs\n */\nexport const getServerAuthSession = (ctx: {\n req: GetServerSidePropsContext[\"req\"];",
"score": 50.34883882877885
},
{
"filename": "src/utils/api.ts",
"retrieved_chunk": " /**\n * Transformer used for data de-serialization from the server.\n *\n * @see https://trpc.io/docs/data-transformers\n */\n transformer: superjson,\n /**\n * Links used to determine request flow from client to server.\n *\n * @see https://trpc.io/docs/links",
"score": 45.92816377933063
},
{
"filename": "src/utils/api.ts",
"retrieved_chunk": "/**\n * This is the client-side entrypoint for your tRPC API. It is used to create the `api` object which\n * contains the Next.js App-wrapper, as well as your type-safe React Query hooks.\n *\n * We also create a few inference helpers for input and output types.\n */\nimport { httpBatchLink, loggerLink } from \"@trpc/client\";\nimport { createTRPCNext } from \"@trpc/next\";\nimport { type inferRouterInputs, type inferRouterOutputs } from \"@trpc/server\";\nimport superjson from \"superjson\";",
"score": 40.00523208758211
},
{
"filename": "src/external/openai/chatGPTApi.ts",
"retrieved_chunk": "Today is the ${new Date().toDateString()}.\nThe user will send a text, and Tod-GPT will respond with a command. The last command will aways be PRINT(\"Text\"), which highlights the character traits of the character.\nUser:\nHi, i'm your user. Remind me to ${exampleTodoItem} tomorrow.\nTod-GPT:\nADD(${(new Date()).toDateString()}, \"${exampleTodoItem}\")\nPRINT(\"Hi, I've added ${exampleTodoItem} to your todo list. ${currentCharacter.exampleConverstationStart}.\")\n`;\n let messages = chatHistory.map((message) => {\n return {",
"score": 34.35351579666494
},
{
"filename": "src/server/auth.ts",
"retrieved_chunk": " res: GetServerSidePropsContext[\"res\"];\n}) => {\n return getServerSession(ctx.req, ctx.res, authOptions);\n};",
"score": 33.616418928471774
}
] | typescript | getServerAuthSession({ req, res }); |
import { createTRPCRouter, protectedProcedure } from "~/server/api/trpc";
import { z } from "zod";
import { createOpenAICompletion } from "~/external/openai/chatGPTApi";
import { ChatGPTMessage } from "~/external/openai/chatGPTMessage";
import { parseActionCode, stringifyActionCode } from "~/external/openai/chatGPTActionItems";
export const messageRouter = createTRPCRouter({
create: protectedProcedure
.input(
z.object({
content: z.string().min(1).max(200),
})
)
.mutation(({ input, ctx }) => {
return ctx.prisma.message.create({
data: {
content: input.content,
authorId: ctx.session.user.id,
},
});
}),
generateGPT: protectedProcedure.mutation(async ({ ctx }) => {
const todoList = await ctx.prisma.todo.findMany({
where: {
authorId: ctx.session.user.id,
},
});
const lastNMessages = await ctx.prisma.message.findMany({
where: {
authorId: ctx.session.user.id,
},
orderBy: {
createdAt: "desc",
},
take: 5,
include: {
character: true,
},
});
const character = await ctx.prisma.user.findUnique({
where: {
id: ctx.session.user.id,
},
}).activeCharacter();
| const chatGptResponse = await createOpenAICompletion(
{ |
type: "assistant",
characterDescription: character?.content ?? "The depressed robot from Hitchhiker's Guide to the Galaxy",
characterName: character?.name ?? "Marvin",
exampleConverstationStart: character?.exampleConverstationStart ?? "Here I am, brain the size of a planet, and this is what they ask me to do",
actions: []
},
todoList,
lastNMessages.reverse().map((message) => {
if (message.isGPT) {
return {
type: "assistant",
characterDescription: message.character?.content,
characterName: message.character?.name,
actions: parseActionCode(message.content),
} as ChatGPTMessage;
}
return {
type: "user",
content: message.content,
} as ChatGPTMessage;
}),
);
for (const action of chatGptResponse.actions) {
if (action.type === "add") {
await ctx.prisma.todo.create({
data: {
title: action.content,
due: action.due,
authorId: ctx.session.user.id,
},
});
}
if (action.type === "complete") {
await ctx.prisma.todo.update({
where: {
id: action.id,
},
data: {
done: true,
},
});
}
if (action.type === "delete") {
await ctx.prisma.todo.delete({
where: {
id: action.id,
},
});
}
if (action.type === "uncomplete") {
await ctx.prisma.todo.update({
where: {
id: action.id,
},
data: {
done: false,
},
});
}
}
return ctx.prisma.message.create({
data: {
content: stringifyActionCode(chatGptResponse.actions),
authorId: ctx.session.user.id,
isGPT: true,
characterId: character?.id,
},
});
}),
findAll: protectedProcedure.query(({ ctx }) => {
return ctx.prisma.message.findMany({
where: {
authorId: ctx.session.user.id,
},
include: {
character: true,
},
take: 6,
orderBy: {
createdAt: "desc",
},
});
}),
deleteAll: protectedProcedure.mutation(({ ctx }) => {
return ctx.prisma.message.deleteMany({
where: {
authorId: ctx.session.user.id,
},
});
}),
});
| src/server/api/routers/message.ts | hackathon-ufrt-gptnotes-e185e8c | [
{
"filename": "src/server/api/routers/me.ts",
"retrieved_chunk": "import { createTRPCRouter, protectedProcedure } from \"~/server/api/trpc\";\nexport const meRouter = createTRPCRouter({\n getMe: protectedProcedure.query(({ ctx }) => {\n return ctx.prisma.user.findUnique({\n where: {\n id: ctx.session.user.id,\n },\n include: {\n activeCharacter: true,\n },",
"score": 32.533001108847955
},
{
"filename": "src/server/api/routers/todo.ts",
"retrieved_chunk": " id: z.string(),\n })\n )\n .mutation(async ({ input, ctx }) => {\n const todo = await ctx.prisma.todo.findFirst({\n where: {\n id: input.id,\n authorId: ctx.session.user.id,\n },\n });",
"score": 25.695336800386546
},
{
"filename": "src/server/api/routers/character.ts",
"retrieved_chunk": " .mutation(({ input, ctx }) => {\n return ctx.prisma.user.update({\n where: {\n id: ctx.session.user.id,\n },\n data: {\n activeCharacterId: input.id,\n },\n });\n }),",
"score": 22.56070275782058
},
{
"filename": "src/server/api/routers/todo.ts",
"retrieved_chunk": " return ctx.prisma.todo.findMany({\n where: {\n authorId: ctx.session.user.id,\n },\n });\n }),\n});",
"score": 21.46355946625083
},
{
"filename": "src/server/api/routers/character.ts",
"retrieved_chunk": " .mutation(({ input, ctx }) => {\n return ctx.prisma.character.create({\n data: {\n name: input.name,\n content: input.content,\n authorId: ctx.session.user.id,\n },\n });\n }),\n findAll: publicProcedure.query(({ ctx }) => {",
"score": 21.17590291132858
}
] | typescript | const chatGptResponse = await createOpenAICompletion(
{ |
/**
* YOU PROBABLY DON'T NEED TO EDIT THIS FILE, UNLESS:
* 1. You want to modify request context (see Part 1).
* 2. You want to create a new middleware or type of procedure (see Part 3).
*
* TL;DR - This is where all the tRPC server stuff is created and plugged in. The pieces you will
* need to use are documented accordingly near the end.
*/
/**
* 1. CONTEXT
*
* This section defines the "contexts" that are available in the backend API.
*
* These allow you to access things when processing a request, like the database, the session, etc.
*/
import { type CreateNextContextOptions } from "@trpc/server/adapters/next";
import { type Session } from "next-auth";
import { getServerAuthSession } from "~/server/auth";
import { prisma } from "~/server/db";
type CreateContextOptions = {
session: Session | null;
};
/**
* This helper generates the "internals" for a tRPC context. If you need to use it, you can export
* it from here.
*
* Examples of things you may need it for:
* - testing, so we don't have to mock Next.js' req/res
* - tRPC's `createSSGHelpers`, where we don't have req/res
*
* @see https://create.t3.gg/en/usage/trpc#-serverapitrpcts
*/
const createInnerTRPCContext = (opts: CreateContextOptions) => {
return {
session: opts.session,
prisma,
};
};
/**
* This is the actual context you will use in your router. It will be used to process every request
* that goes through your tRPC endpoint.
*
* @see https://trpc.io/docs/context
*/
export const createTRPCContext = async (opts: CreateNextContextOptions) => {
const { req, res } = opts;
// Get the session from the server using the getServerSession wrapper function
const session = | await getServerAuthSession({ req, res }); |
return createInnerTRPCContext({
session,
});
};
/**
* 2. INITIALIZATION
*
* This is where the tRPC API is initialized, connecting the context and transformer. We also parse
* ZodErrors so that you get typesafety on the frontend if your procedure fails due to validation
* errors on the backend.
*/
import { initTRPC, TRPCError } from "@trpc/server";
import superjson from "superjson";
import { ZodError } from "zod";
const t = initTRPC.context<typeof createTRPCContext>().create({
transformer: superjson,
errorFormatter({ shape, error }) {
return {
...shape,
data: {
...shape.data,
zodError:
error.cause instanceof ZodError ? error.cause.flatten() : null,
},
};
},
});
/**
* 3. ROUTER & PROCEDURE (THE IMPORTANT BIT)
*
* These are the pieces you use to build your tRPC API. You should import these a lot in the
* "/src/server/api/routers" directory.
*/
/**
* This is how you create new routers and sub-routers in your tRPC API.
*
* @see https://trpc.io/docs/router
*/
export const createTRPCRouter = t.router;
/**
* Public (unauthenticated) procedure
*
* This is the base piece you use to build new queries and mutations on your tRPC API. It does not
* guarantee that a user querying is authorized, but you can still access user session data if they
* are logged in.
*/
export const publicProcedure = t.procedure;
/** Reusable middleware that enforces users are logged in before running the procedure. */
const enforceUserIsAuthed = t.middleware(({ ctx, next }) => {
if (!ctx.session || !ctx.session.user) {
throw new TRPCError({ code: "UNAUTHORIZED" });
}
return next({
ctx: {
// infers the `session` as non-nullable
session: { ...ctx.session, user: ctx.session.user },
},
});
});
/**
* Protected (authenticated) procedure
*
* If you want a query or mutation to ONLY be accessible to logged in users, use this. It verifies
* the session is valid and guarantees `ctx.session.user` is not null.
*
* @see https://trpc.io/docs/procedures
*/
export const protectedProcedure = t.procedure.use(enforceUserIsAuthed);
| src/server/api/trpc.ts | hackathon-ufrt-gptnotes-e185e8c | [
{
"filename": "src/server/auth.ts",
"retrieved_chunk": " */\n ]\n};\n/**\n * Wrapper for `getServerSession` so that you don't need to import the `authOptions` in every file.\n *\n * @see https://next-auth.js.org/configuration/nextjs\n */\nexport const getServerAuthSession = (ctx: {\n req: GetServerSidePropsContext[\"req\"];",
"score": 50.34883882877885
},
{
"filename": "src/utils/api.ts",
"retrieved_chunk": " /**\n * Transformer used for data de-serialization from the server.\n *\n * @see https://trpc.io/docs/data-transformers\n */\n transformer: superjson,\n /**\n * Links used to determine request flow from client to server.\n *\n * @see https://trpc.io/docs/links",
"score": 45.92816377933063
},
{
"filename": "src/utils/api.ts",
"retrieved_chunk": "/**\n * This is the client-side entrypoint for your tRPC API. It is used to create the `api` object which\n * contains the Next.js App-wrapper, as well as your type-safe React Query hooks.\n *\n * We also create a few inference helpers for input and output types.\n */\nimport { httpBatchLink, loggerLink } from \"@trpc/client\";\nimport { createTRPCNext } from \"@trpc/next\";\nimport { type inferRouterInputs, type inferRouterOutputs } from \"@trpc/server\";\nimport superjson from \"superjson\";",
"score": 40.00523208758211
},
{
"filename": "src/external/openai/chatGPTApi.ts",
"retrieved_chunk": "Today is the ${new Date().toDateString()}.\nThe user will send a text, and Tod-GPT will respond with a command. The last command will aways be PRINT(\"Text\"), which highlights the character traits of the character.\nUser:\nHi, i'm your user. Remind me to ${exampleTodoItem} tomorrow.\nTod-GPT:\nADD(${(new Date()).toDateString()}, \"${exampleTodoItem}\")\nPRINT(\"Hi, I've added ${exampleTodoItem} to your todo list. ${currentCharacter.exampleConverstationStart}.\")\n`;\n let messages = chatHistory.map((message) => {\n return {",
"score": 34.35351579666494
},
{
"filename": "src/server/auth.ts",
"retrieved_chunk": " res: GetServerSidePropsContext[\"res\"];\n}) => {\n return getServerSession(ctx.req, ctx.res, authOptions);\n};",
"score": 33.616418928471774
}
] | typescript | await getServerAuthSession({ req, res }); |
// for license and copyright look at the repository
import { IReport, IReportConfigurationEntry, IReportConfigInfo, PullRequestCallback } from './Interfaces/ReportTypes'
export enum ConfigurationCategory {
None,
StaticMeasures,
TimeRelatedMeasures,
StatusCheckRelatedMeasures,
ReportGeneratorValue,
}
export const ConfigurationCategoryTitleMap = new Map<ConfigurationCategory, string>([
[ConfigurationCategory.None, 'None'],
[ConfigurationCategory.StaticMeasures, 'Static measures'],
[ConfigurationCategory.TimeRelatedMeasures, 'Time related measures'],
[ConfigurationCategory.StatusCheckRelatedMeasures, 'Status check related measures'],
[ConfigurationCategory.ReportGeneratorValue, 'Report generator related predefined strings'],
])
export class ConfigurationInfo implements IReportConfigInfo {
public Description
public PresentationValue
public Value
public ConfigurationName
public ConfigValue
public ConfigurationCategory
constructor(
label: string,
presentationValue: string | number,
value: string | number,
configName: string,
defaultConfigValue: string | number,
configurationCategory: ConfigurationCategory,
) {
this.Description = label
this.PresentationValue = presentationValue
this.Value = value
this.ConfigurationName = configName
this.ConfigValue = defaultConfigValue
this.ConfigurationCategory = configurationCategory
}
}
| export class ReportConfigurationEntry implements IReportConfigurationEntry { |
public Id
public Info
public PullRequestCallback: PullRequestCallback
constructor(id = '', info: IReportConfigInfo, measureCallback: PullRequestCallback = () => '') {
this.Id = id
this.Info = info
this.PullRequestCallback = measureCallback
}
}
export class Report implements IReport {
public Id = ''
public Description = ''
public Entries: ReportConfigurationEntry[] = []
}
| src/Report.Definitions.ts | philips-software-pull-request-report-action-3390d78 | [
{
"filename": "src/Report.Generation.ts",
"retrieved_chunk": " const categoryEntries = entries.filter((entry) => entry.Info.ConfigurationCategory === measureCategory)\n categoryEntries.forEach((entry) => {\n entry.Info.Value = entry.PullRequestCallback(pr)\n })\n const rows = categoryEntries.map((entry) => ({\n Description: entry.Info.Description,\n Value: entry.Info.Value,\n }))\n return table({\n columns: [{ name: this.DescriptionHeaderLabel }, { name: this.ValueHeaderLabel }],",
"score": 32.760763645368584
},
{
"filename": "src/Report.Generation.ts",
"retrieved_chunk": " tables.push(this.GenerateCategoryTable(pr, report, category))\n })\n return tables\n }\n private GenerateCategoryTitle(measureCategory: ConfigurationCategory): H3Entry {\n const title = { h3: `${ConfigurationCategoryTitleMap.get(measureCategory) || 'No category'}` }\n return title\n }\n private GenerateCategoryTable(pr: IPullRequest, report: IReport, measureCategory: ConfigurationCategory): TableEntry {\n const entries = this.GetMeasurementEntries(report.Entries)",
"score": 24.81456706179777
},
{
"filename": "src/Report.Generation.ts",
"retrieved_chunk": " return entries.filter((entry) => ConfigurationCategory[entry.Info.ConfigurationCategory].endsWith('Measures'))\n }\n return []\n }\n public GenerateMeasureTable(pr: IPullRequest, report: IReport): MarkdownEntry[] {\n const tables: MarkdownEntry[] = []\n const entries = this.GetMeasurementEntries(report.Entries)\n const categories = new Set(entries.map((entry) => entry.Info.ConfigurationCategory))\n categories.forEach((category) => {\n tables.push(this.GenerateCategoryTitle(category))",
"score": 23.751137924140547
},
{
"filename": "src/Report.Generation.ts",
"retrieved_chunk": "// for license and copyright look at the repository\nimport { IPullRequest } from './Interfaces/PullRequestTypes'\nimport { IReport, IReportConfigurationEntry } from './Interfaces/ReportTypes'\nimport { tsMarkdown, table, TableEntry, H1Entry, H3Entry, MarkdownEntry } from 'ts-markdown'\nimport { ConfigurationCategory, ConfigurationCategoryTitleMap } from './Report.Definitions'\nexport class ReportGenerator {\n DescriptionHeaderLabel = 'Description'\n ValueHeaderLabel = 'Value'\n public Generate(pr: IPullRequest, report: IReport): string {\n const header = this.GenerateHeader(pr, report)",
"score": 22.853815650762733
},
{
"filename": "src/Report.Generation.ts",
"retrieved_chunk": " const table = this.GenerateMeasureTable(pr, report)\n const reportElements = [header, ...table]\n return tsMarkdown(reportElements)\n }\n public GenerateHeader(pr: IPullRequest, report: IReport): H1Entry {\n const title = { h1: `${report.Description} (#${pr.id})` }\n return title\n }\n public GetMeasurementEntries(entries: IReportConfigurationEntry[]): IReportConfigurationEntry[] {\n if (entries !== undefined && entries !== null && entries.length > 0) {",
"score": 21.382449263677394
}
] | typescript | export class ReportConfigurationEntry implements IReportConfigurationEntry { |
// for license and copyright look at the repository
import { IPullRequest } from './Interfaces/PullRequestTypes'
import { IReport, IReportConfigurationEntry } from './Interfaces/ReportTypes'
import { tsMarkdown, table, TableEntry, H1Entry, H3Entry, MarkdownEntry } from 'ts-markdown'
import { ConfigurationCategory, ConfigurationCategoryTitleMap } from './Report.Definitions'
export class ReportGenerator {
DescriptionHeaderLabel = 'Description'
ValueHeaderLabel = 'Value'
public Generate(pr: IPullRequest, report: IReport): string {
const header = this.GenerateHeader(pr, report)
const table = this.GenerateMeasureTable(pr, report)
const reportElements = [header, ...table]
return tsMarkdown(reportElements)
}
public GenerateHeader(pr: IPullRequest, report: IReport): H1Entry {
const title = { h1: `${report.Description} (#${pr.id})` }
return title
}
public GetMeasurementEntries(entries: IReportConfigurationEntry[]): IReportConfigurationEntry[] {
if (entries !== undefined && entries !== null && entries.length > 0) {
return entries.filter((entry) => ConfigurationCategory[entry.Info.ConfigurationCategory].endsWith('Measures'))
}
return []
}
public GenerateMeasureTable(pr: IPullRequest, report: IReport): MarkdownEntry[] {
const tables: MarkdownEntry[] = []
const entries = this.GetMeasurementEntries(report.Entries)
const categories = new Set(entries.map((entry) => entry.Info.ConfigurationCategory))
categories.forEach((category) => {
tables.push(this.GenerateCategoryTitle(category))
tables.push(this.GenerateCategoryTable(pr, report, category))
})
return tables
}
private GenerateCategoryTitle(measureCategory: ConfigurationCategory): H3Entry {
const title = { h3: `${ | ConfigurationCategoryTitleMap.get(measureCategory) || 'No category'}` } |
return title
}
private GenerateCategoryTable(pr: IPullRequest, report: IReport, measureCategory: ConfigurationCategory): TableEntry {
const entries = this.GetMeasurementEntries(report.Entries)
const categoryEntries = entries.filter((entry) => entry.Info.ConfigurationCategory === measureCategory)
categoryEntries.forEach((entry) => {
entry.Info.Value = entry.PullRequestCallback(pr)
})
const rows = categoryEntries.map((entry) => ({
Description: entry.Info.Description,
Value: entry.Info.Value,
}))
return table({
columns: [{ name: this.DescriptionHeaderLabel }, { name: this.ValueHeaderLabel }],
rows: rows,
})
}
}
| src/Report.Generation.ts | philips-software-pull-request-report-action-3390d78 | [
{
"filename": "src/Report.Measures.ts",
"retrieved_chunk": " // get the property value of inputs\n entry.Info.ConfigValue = (configValues as { [key: string]: string | number })[entry.Info.ConfigurationName]\n })\n return measurementEntries\n}\nexport const GetActiveMeasures = (entries: Array<ReportConfigurationEntry>): Array<ReportConfigurationEntry> => {\n return entries.filter((entry) => entry.Info.ConfigValue === 'yes')\n}\nexport const ReportConfigurationTable = new Array<ReportConfigurationEntry>()\nReportConfigurationTable.push(",
"score": 30.104709048808463
},
{
"filename": "src/Report.Definitions.ts",
"retrieved_chunk": " ) {\n this.Description = label\n this.PresentationValue = presentationValue\n this.Value = value\n this.ConfigurationName = configName\n this.ConfigValue = defaultConfigValue\n this.ConfigurationCategory = configurationCategory\n }\n}\nexport class ReportConfigurationEntry implements IReportConfigurationEntry {",
"score": 18.34575977790682
},
{
"filename": "src/Report.Definitions.ts",
"retrieved_chunk": " public Id\n public Info\n public PullRequestCallback: PullRequestCallback\n constructor(id = '', info: IReportConfigInfo, measureCallback: PullRequestCallback = () => '') {\n this.Id = id\n this.Info = info\n this.PullRequestCallback = measureCallback\n }\n}\nexport class Report implements IReport {",
"score": 17.912470248560815
},
{
"filename": "src/Report.Measures.ts",
"retrieved_chunk": " GetReviewCount,\n GetCommentCount,\n} from './Report.Functions'\nimport { ConfigurationInputs } from './action.config.type'\nexport const UpdateConfigValues = (\n configValues: ConfigurationInputs,\n measurementEntries: Array<ReportConfigurationEntry>,\n): Array<ReportConfigurationEntry> => {\n // Update measurementEntries with config values from inputs\n measurementEntries.forEach((entry) => {",
"score": 11.414790803427882
},
{
"filename": "src/Report.Measures.ts",
"retrieved_chunk": " 'ShowTimeToMergeAfterLastReview',\n 'yes',\n ConfigurationCategory.TimeRelatedMeasures,\n ),\n (pr) => MillisecondsToReadableDuration(GetTimeToMergeAfterLastReview(pr)),\n ),\n)\nReportConfigurationTable.push(\n new ReportConfigurationEntry(\n 'no_of_comment_only_reviews',",
"score": 9.612896886905455
}
] | typescript | ConfigurationCategoryTitleMap.get(measureCategory) || 'No category'}` } |
/**
* @swagger
* components:
* schemas:
* SignupRequest:
* type: object
* required:
* - email
* - password
* - name
* properties:
* name:
* type: string
* description: The user name
* email:
* type: string
* description: The user email address
* password:
* type: string
* description: The user password
* example:
* name: John Doe
* email: [email protected]
* password: password123
* LoginRequest:
* type: object
* required:
* - email
* - password
* properties:
* email:
* type: string
* description: The user email address
* password:
* type: string
* description: The user password
* example:
* email: [email protected]
* password: password123
*/
import express from 'express';
import { transferFund } from '../service';
import { protect } from '../../../middleware';
const router = express.Router();
/**
* @swagger
* /api/v1/account/transfer:
* post:
* tags:
* - Transfer
* summary: Transfer funds between accounts
* security:
* - BearerAuth: []
* requestBody:
* required: true
* content:
* application/json:
* schema:
* type: object
* properties:
* fromAccountId:
* type: string
* description: The ID of the account to transfer funds from.
* example: "123456"
* toAccountId:
* type: string
* description: The ID of the account to transfer funds to.
* example: "789012"
* amount:
* type: number
* description: The amount of funds to transfer.
* example: 1000.00
* tag:
* type: string
* description: The tag associated with the transfer.
* example: "Rent payment"
* responses:
* '200':
* description: Successful transfer of funds
* '400':
* description: Invalid request parameters
* '401':
* description: Unauthorized request
*/
router.post(' | /transfer', protect, transferFund); |
export default router;
| src/modules/account/controller/index.ts | walosha-BACKEND_DEV_TESTS-db2fcb4 | [
{
"filename": "src/modules/auth/controller/index.ts",
"retrieved_chunk": " * security:\n * - bearerAuth: []\n * responses:\n * \"200\":\n * description: The user profile\n * \"401\":\n * description: Unauthorized\n */\nrouter.post('/me', protect, getMe);\nexport default router;",
"score": 30.570755577302958
},
{
"filename": "src/modules/auth/controller/index.ts",
"retrieved_chunk": " * application/json:\n * schema:\n * type: object\n * properties:\n * accessToken:\n * type: string\n * description: Access token\n * example: eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiJKb2huIERvZSIsImlhdCI6MTUxNjIzOTAyMn0.SflKxwRJSMeKKF2QT4fwpMeJf36POk6yJV_adQssw5c\n * \"400\":\n * description: Invalid request or refresh token is not present",
"score": 21.521564838341476
},
{
"filename": "src/modules/auth/controller/users.ts",
"retrieved_chunk": " * description: The ID of the user to delete\n * responses:\n * \"204\":\n * description: User deleted successfully\n * \"401\":\n * description: Unauthorized\n * \"404\":\n * description: User not found\n */\n// A simple case where users can only delete themselves not the admin",
"score": 19.39768517282195
},
{
"filename": "src/modules/auth/controller/users.ts",
"retrieved_chunk": " * items:\n * $ref: '#/components/schemas/User'\n * \"401\":\n * description: Unauthorized\n */\nrouter.get('/', protect, restrictTo('admin'), fetchUsers);\n/**\n * @swagger\n * /api/v1/users/{id}:\n * delete:",
"score": 18.199488965442363
},
{
"filename": "src/modules/auth/controller/index.ts",
"retrieved_chunk": " * \"401\":\n * description: Invalid or expired token or refresh token was already used\n */\nrouter.post('/refresh', refreshMiddleware, refresh);\n/**\n * @swagger\n * /api/v1/auth/me:\n * post:\n * summary: Get user profile\n * tags: [Auth]",
"score": 16.008819285922844
}
] | typescript | /transfer', protect, transferFund); |
/**
* @swagger
* components:
* schemas:
* SignupRequest:
* type: object
* required:
* - email
* - password
* - name
* properties:
* name:
* type: string
* description: The user name
* email:
* type: string
* description: The user email address
* password:
* type: string
* description: The user password
* example:
* name: John Doe
* email: [email protected]
* password: password123
* LoginRequest:
* type: object
* required:
* - email
* - password
* properties:
* email:
* type: string
* description: The user email address
* password:
* type: string
* description: The user password
* example:
* email: [email protected]
* password: password123
*/
import express from 'express';
import { transferFund } from '../service';
import { protect } from '../../../middleware';
const router = express.Router();
/**
* @swagger
* /api/v1/account/transfer:
* post:
* tags:
* - Transfer
* summary: Transfer funds between accounts
* security:
* - BearerAuth: []
* requestBody:
* required: true
* content:
* application/json:
* schema:
* type: object
* properties:
* fromAccountId:
* type: string
* description: The ID of the account to transfer funds from.
* example: "123456"
* toAccountId:
* type: string
* description: The ID of the account to transfer funds to.
* example: "789012"
* amount:
* type: number
* description: The amount of funds to transfer.
* example: 1000.00
* tag:
* type: string
* description: The tag associated with the transfer.
* example: "Rent payment"
* responses:
* '200':
* description: Successful transfer of funds
* '400':
* description: Invalid request parameters
* '401':
* description: Unauthorized request
*/
| router.post('/transfer', protect, transferFund); |
export default router;
| src/modules/account/controller/index.ts | walosha-BACKEND_DEV_TESTS-db2fcb4 | [
{
"filename": "src/modules/auth/controller/index.ts",
"retrieved_chunk": " * security:\n * - bearerAuth: []\n * responses:\n * \"200\":\n * description: The user profile\n * \"401\":\n * description: Unauthorized\n */\nrouter.post('/me', protect, getMe);\nexport default router;",
"score": 30.570755577302958
},
{
"filename": "src/modules/auth/controller/index.ts",
"retrieved_chunk": " * application/json:\n * schema:\n * type: object\n * properties:\n * accessToken:\n * type: string\n * description: Access token\n * example: eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiJKb2huIERvZSIsImlhdCI6MTUxNjIzOTAyMn0.SflKxwRJSMeKKF2QT4fwpMeJf36POk6yJV_adQssw5c\n * \"400\":\n * description: Invalid request or refresh token is not present",
"score": 24.51954538068032
},
{
"filename": "src/modules/auth/controller/users.ts",
"retrieved_chunk": " * description: The ID of the user to delete\n * responses:\n * \"204\":\n * description: User deleted successfully\n * \"401\":\n * description: Unauthorized\n * \"404\":\n * description: User not found\n */\n// A simple case where users can only delete themselves not the admin",
"score": 19.39768517282195
},
{
"filename": "src/modules/auth/controller/users.ts",
"retrieved_chunk": " * items:\n * $ref: '#/components/schemas/User'\n * \"401\":\n * description: Unauthorized\n */\nrouter.get('/', protect, restrictTo('admin'), fetchUsers);\n/**\n * @swagger\n * /api/v1/users/{id}:\n * delete:",
"score": 18.199488965442363
},
{
"filename": "src/modules/auth/controller/index.ts",
"retrieved_chunk": " * - refresh\n * properties:\n * refresh:\n * type: string\n * description: Refresh token\n * example: eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpZCI6IjY0NGYwMjg0MWRmNGJlYzliOWI3ZjlhYSIsImlhdCI6MTY4Mjg5OTU4OCwiZXhwIjoxNjgzMDcyMzg4fQ.Bt2kzyxyUEtUy9pLvr0zSzpI8_xTaM6KulO2mwYztbQ\n * responses:\n * \"200\":\n * description: The new access token\n * content:",
"score": 17.031513004481877
}
] | typescript | router.post('/transfer', protect, transferFund); |
// for license and copyright look at the repository
import {
IPullRequest,
IPullRequestComment,
IPullRequestCommit,
IPullRequestReview,
} from './Interfaces/PullRequestTypes'
import { EventWithTime } from './Interfaces/ReportTypes'
import { StatusCheck } from './PullRequest.Definitions'
export const GenerateEventTimeline = (pullRequest: IPullRequest): EventWithTime[] => {
const events: EventWithTime[][] = []
// merge all interesting events into a single list
events.push([
{ type: 'createAt', date: new Date(pullRequest.createdAt), event_instance: pullRequest.createdAt, time: 0 },
])
events.push(
pullRequest.commits.map((commit) => ({
type: 'commit',
date: new Date(commit.authorDate),
event_instance: commit,
time: 0,
})),
)
events.push(
pullRequest.reviews.map((review) => ({
type: 'review',
date: new Date(review.submittedAt),
event_instance: review,
time: 0,
})),
)
events.push(
pullRequest.statusChecks.map((statusCheck) => ({
type: 'statusCheck',
date: new Date(statusCheck.completedAt),
event_instance: statusCheck,
time: 0,
})),
)
events.push(
pullRequest.comments.map((comment) => ({
type: 'comment',
date: new Date(comment.createdAt),
event_instance: comment,
time: 0,
})),
)
events.push([
{ type: 'mergedAt', date: new Date(pullRequest.mergedAt), event_instance: pullRequest.mergedAt, time: 0 },
])
events.push([
{ type: 'closedAt', date: new Date(pullRequest.closedAt), event_instance: pullRequest.closedAt, time: 0 },
])
// flatten the list
const flattenedEvents = events.flat()
// filter out events that don't have a valid date
const filteredEvents = flattenedEvents.filter((event) => event.date !== null)
// sort the events by date
filteredEvents.sort((a, b) => a.date.getTime() - b.date.getTime())
// now, create a list of events with the time between events
const eventsWithTime: EventWithTime[] = []
// calculate the time between events
for (let i = 0; i < filteredEvents.length; i++) {
if (i === 0) {
eventsWithTime.push({
type: filteredEvents[i].type,
date: filteredEvents[i].date,
time: 0,
| event_instance: filteredEvents[i].event_instance,
})
} else { |
eventsWithTime.push({
type: filteredEvents[i].type,
date: filteredEvents[i].date,
time: (filteredEvents[i].date.getTime() - filteredEvents[i - 1].date.getTime()) / 1000,
event_instance: filteredEvents[i].event_instance,
})
}
}
return eventsWithTime
}
export const MillisecondsToReadableDuration = (leadTimeInMSec: number) => {
const seconds = +(leadTimeInMSec / 1000).toFixed(1)
const minutes = +(leadTimeInMSec / (1000 * 60)).toFixed(1)
const hours = +(leadTimeInMSec / (1000 * 60 * 60)).toFixed(1)
const days = +(leadTimeInMSec / (1000 * 60 * 60 * 24)).toFixed(1)
if (seconds < 60) return `${seconds} Sec`
else if (minutes < 60) return `${minutes} Min`
else if (hours < 24) return `${hours} Hours`
else return `${days} Days`
}
export const GetMergedOrClosedDate = (pullRequest: IPullRequest): string => {
let mergedOrClosedAt = pullRequest.mergedAt
if (mergedOrClosedAt == null) mergedOrClosedAt = pullRequest.closedAt
return mergedOrClosedAt
}
export const GetLeadTimeForPullRequest = (pullRequest: IPullRequest) => {
// parse createAt as date from string
const createAt = new Date(pullRequest.createdAt)
const mergedOrClosedAt = new Date(GetMergedOrClosedDate(pullRequest))
const duration = mergedOrClosedAt.getTime() - createAt.getTime()
if (duration <= 0 || isNaN(duration)) return 0
return duration
}
export const GetTimeSpendOnBranchBeforePRCreated = (pullRequest: IPullRequest) => {
const eventTimeline = GenerateEventTimeline(pullRequest)
const createAtEvent = eventTimeline.find((event) => event.type === 'createAt')
const firstCommitEvent = eventTimeline.find((event) => event.type === 'commit')
if (!createAtEvent || !firstCommitEvent) return 0
const duration = createAtEvent.date.getTime() - firstCommitEvent.date.getTime()
if (duration <= 0 || isNaN(duration)) return 0
return duration
}
export const GetTimeSpendOnBranchBeforePRMerged = (pullRequest: IPullRequest) => {
const eventTimeline = GenerateEventTimeline(pullRequest)
const mergedAtEvent = eventTimeline.find((event) => event.type === 'mergedAt')
const firstCommitEvent = eventTimeline.find((event) => event.type === 'commit')
if (mergedAtEvent && firstCommitEvent && mergedAtEvent.date.getTime() > firstCommitEvent.date.getTime()) {
return mergedAtEvent.date.getTime() - firstCommitEvent.date.getTime()
}
return -1
}
export const GetTimeToMergeAfterLastReview = (pullRequest: IPullRequest) => {
const eventTimeline = GenerateEventTimeline(pullRequest)
const mergedAtEvent = eventTimeline.find((event) => event.type === 'mergedAt')
const reviewEvents = eventTimeline.filter((event) => event.type === 'review')
if (reviewEvents.length <= 0) {
return -1
}
const lastReviewEvent = reviewEvents.reverse()[0]
if (mergedAtEvent && lastReviewEvent && mergedAtEvent.date.getTime() > lastReviewEvent.date.getTime()) {
return mergedAtEvent.date.getTime() - lastReviewEvent.date.getTime()
}
return -1
}
export const GetTotalRuntimeForLastStatusCheckRun = (pullRequest: IPullRequest) => {
const eventTimeline = GenerateEventTimeline(pullRequest)
const statusCheckEvents = eventTimeline
.filter((event) => event.type === 'statusCheck')
.map((event) => event.event_instance as StatusCheck)
.filter((statusCheck) => statusCheck.status == 'COMPLETED')
if (statusCheckEvents.length <= 0) {
return 0
}
let totalTime = 0
statusCheckEvents.forEach((statusCheck) => {
totalTime += new Date(statusCheck.completedAt).getTime() - new Date(statusCheck.startedAt).getTime()
})
return totalTime
}
export const GetTimeSpendInPrForLastStatusCheckRun = (pullRequest: IPullRequest) => {
const eventTimeline = GenerateEventTimeline(pullRequest)
const statusCheckEvents = eventTimeline
.filter((event) => event.type === 'statusCheck')
.map((event) => event.event_instance as StatusCheck)
.filter((statusCheck) => statusCheck.status == 'COMPLETED')
if (statusCheckEvents.length <= 0) {
return 0
}
let earliestStart = new Date()
let latestCompletion = new Date(0, 0, 0)
statusCheckEvents.forEach((statusCheckEvent) => {
const completedDate = new Date(statusCheckEvent.completedAt)
const startedDate = new Date(statusCheckEvent.startedAt)
if (startedDate < earliestStart) {
earliestStart = startedDate
}
if (completedDate > latestCompletion) {
latestCompletion = completedDate
}
})
return latestCompletion.getTime() - earliestStart.getTime()
}
const FilterReviewsByState = (pullRequest: IPullRequest, state: string) => {
const eventTimeline = GenerateEventTimeline(pullRequest)
const reviewEvents = eventTimeline.filter((event) => event.type === 'review')
if (reviewEvents.length <= 0) {
return []
}
const filteredReviews = reviewEvents.filter((reviewEvent) => {
const review = reviewEvent.event_instance as IPullRequestReview
return review.state === state
})
return filteredReviews
}
export const GetNumberOfCommentOnlyReviews = (pullRequest: IPullRequest) => {
return FilterReviewsByState(pullRequest, 'COMMENTED').length
}
export const GetNumberOfRequestedChangeReviews = (pullRequest: IPullRequest) => {
return FilterReviewsByState(pullRequest, 'CHANGES_REQUESTED').length
}
export const GetNumberOfApprovedReviews = (pullRequest: IPullRequest) => {
return FilterReviewsByState(pullRequest, 'APPROVED').length
}
export const GetUniqueReviewParticipants = (pullRequest: IPullRequest) => {
const eventTimeline = GenerateEventTimeline(pullRequest)
const reviewEvents = eventTimeline.filter((event) => event.type === 'review')
// extract unique reviewers from review events
return reviewEvents
.map((reviewEvent) => reviewEvent.event_instance as IPullRequestReview)
.map((review) => review.authorLogin)
.filter((value, index, self) => self.indexOf(value) === index)
}
export const GetUniqueCommentParticipants = (pullRequest: IPullRequest) => {
const eventTimeline = GenerateEventTimeline(pullRequest)
const commentEvents = eventTimeline.filter((event) => event.type === 'comment')
// extract unique commenter from review events
return commentEvents
.map((commentEvent) => commentEvent.event_instance as IPullRequestComment)
.map((comment) => comment.authorLogin)
.filter((value, index, self) => self.indexOf(value) === index)
}
export const GetUniqueCommitterParticipants = (pullRequest: IPullRequest) => {
const eventTimeline = GenerateEventTimeline(pullRequest)
const commitEvents = eventTimeline.filter((event) => event.type === 'commit')
// extract unique reviewers from review events
return commitEvents
.map((commitEvent) => commitEvent.event_instance as IPullRequestCommit)
.map((commit) => commit.authors.filter((author) => author.login !== null).map((author) => author.login))
.flat()
.filter((value, index, self) => self.indexOf(value) === index)
}
export const GetNumberOfActivePullRequestReviewParticipants = (pullRequest: IPullRequest) => {
const uniqueReviewers = GetUniqueReviewParticipants(pullRequest)
const uniqueCommenter = GetUniqueCommentParticipants(pullRequest)
return uniqueReviewers.concat(uniqueCommenter).filter((value, index, self) => self.indexOf(value) === index).length
}
export const GetNumberOfPullRequestCommitter = (pullRequest: IPullRequest) => {
return GetUniqueCommitterParticipants(pullRequest).length
}
export const GetTotalNumberOfParticipants = (pullRequest: IPullRequest) => {
return GetNumberOfActivePullRequestReviewParticipants(pullRequest) + GetNumberOfPullRequestCommitter(pullRequest)
}
| src/Report.Calculation.ts | philips-software-pull-request-report-action-3390d78 | [
{
"filename": "src/Interfaces/ReportTypes.ts",
"retrieved_chunk": " Entries: IReportConfigurationEntry[]\n}\nexport interface EventWithTime {\n type: string\n date: Date\n time: number\n event_instance: unknown\n}",
"score": 37.079791192691225
},
{
"filename": "src/PullRequest.Definitions.ts",
"retrieved_chunk": "// for license and copyright look at the repository\nimport {\n type IPullRequest,\n type IFileChangeSummary,\n type IPullRequestComment,\n type IPullRequestCommit,\n type IPullRequestReview,\n type IStatusCheck,\n ICommitAuthor,\n} from './Interfaces/PullRequestTypes'",
"score": 14.277769152703863
},
{
"filename": "src/Report.Measures.ts",
"retrieved_chunk": ")\nReportConfigurationTable.push(\n new ReportConfigurationEntry(\n 'pr_lead_time',\n new ConfigurationInfo(\n 'PR lead time (from creation to close of PR)',\n 0,\n 0,\n 'ShowPRLeadTime',\n 'yes',",
"score": 13.493875888184721
},
{
"filename": "src/Report.Measures.ts",
"retrieved_chunk": " (pr) => MillisecondsToReadableDuration(GetTotalRuntimeForLastStatusCheckRun(pr)),\n ),\n)\nReportConfigurationTable.push(\n new ReportConfigurationEntry(\n 'pr_time_spend_in_pr_for_last_status_check_run',\n new ConfigurationInfo(\n 'Total time spend in last status check run on PR',\n 0,\n 0,",
"score": 12.857792131009438
},
{
"filename": "src/Report.Measures.ts",
"retrieved_chunk": " () => 0,\n ),\n)\nReportConfigurationTable.push(\n new ReportConfigurationEntry(\n 'create_report_comment',\n new ConfigurationInfo(\n 'Add PR report to the PR as comment',\n 0,\n 0,",
"score": 8.617486736287255
}
] | typescript | event_instance: filteredEvents[i].event_instance,
})
} else { |
// for license and copyright look at the repository
import {
IPullRequest,
IPullRequestComment,
IPullRequestCommit,
IPullRequestReview,
} from './Interfaces/PullRequestTypes'
import { EventWithTime } from './Interfaces/ReportTypes'
import { StatusCheck } from './PullRequest.Definitions'
export const GenerateEventTimeline = (pullRequest: IPullRequest): EventWithTime[] => {
const events: EventWithTime[][] = []
// merge all interesting events into a single list
events.push([
{ type: 'createAt', date: new Date(pullRequest.createdAt), event_instance: pullRequest.createdAt, time: 0 },
])
events.push(
pullRequest.commits.map((commit) => ({
type: 'commit',
date: new Date(commit.authorDate),
event_instance: commit,
time: 0,
})),
)
events.push(
pullRequest.reviews.map((review) => ({
type: 'review',
date: new Date(review.submittedAt),
event_instance: review,
time: 0,
})),
)
events.push(
pullRequest.statusChecks.map((statusCheck) => ({
type: 'statusCheck',
date: new Date(statusCheck.completedAt),
event_instance: statusCheck,
time: 0,
})),
)
events.push(
pullRequest.comments.map((comment) => ({
type: 'comment',
date: new Date(comment.createdAt),
event_instance: comment,
time: 0,
})),
)
events.push([
{ type: 'mergedAt', date: new Date(pullRequest.mergedAt), event_instance: pullRequest.mergedAt, time: 0 },
])
events.push([
{ type: 'closedAt', date: new Date(pullRequest.closedAt), event_instance: pullRequest.closedAt, time: 0 },
])
// flatten the list
const flattenedEvents = events.flat()
// filter out events that don't have a valid date
const filteredEvents = flattenedEvents.filter((event) => event.date !== null)
// sort the events by date
filteredEvents.sort((a, b) => a.date.getTime() - b.date.getTime())
// now, create a list of events with the time between events
const eventsWithTime: EventWithTime[] = []
// calculate the time between events
for (let i = 0; i < filteredEvents.length; i++) {
if (i === 0) {
eventsWithTime.push({
type: filteredEvents[i].type,
date: filteredEvents[i].date,
time: 0,
event_instance: filteredEvents[i].event_instance,
})
} else {
eventsWithTime.push({
type: filteredEvents[i].type,
date: filteredEvents[i].date,
time: (filteredEvents[i].date.getTime() - filteredEvents[i - 1].date.getTime()) / 1000,
event_instance: filteredEvents[i].event_instance,
})
}
}
return eventsWithTime
}
export const MillisecondsToReadableDuration = (leadTimeInMSec: number) => {
const seconds = +(leadTimeInMSec / 1000).toFixed(1)
const minutes = +(leadTimeInMSec / (1000 * 60)).toFixed(1)
const hours = +(leadTimeInMSec / (1000 * 60 * 60)).toFixed(1)
const days = +(leadTimeInMSec / (1000 * 60 * 60 * 24)).toFixed(1)
if (seconds < 60) return `${seconds} Sec`
else if (minutes < 60) return `${minutes} Min`
else if (hours < 24) return `${hours} Hours`
else return `${days} Days`
}
export const GetMergedOrClosedDate = (pullRequest: IPullRequest): string => {
let mergedOrClosedAt = pullRequest.mergedAt
if (mergedOrClosedAt == null) mergedOrClosedAt = pullRequest.closedAt
return mergedOrClosedAt
}
export const GetLeadTimeForPullRequest = (pullRequest: IPullRequest) => {
// parse createAt as date from string
const createAt = new Date(pullRequest.createdAt)
const mergedOrClosedAt = new Date(GetMergedOrClosedDate(pullRequest))
const duration = mergedOrClosedAt.getTime() - createAt.getTime()
if (duration <= 0 || isNaN(duration)) return 0
return duration
}
export const GetTimeSpendOnBranchBeforePRCreated = (pullRequest: IPullRequest) => {
const eventTimeline = GenerateEventTimeline(pullRequest)
const createAtEvent = eventTimeline.find((event) => event.type === 'createAt')
const firstCommitEvent = eventTimeline.find((event) => event.type === 'commit')
if (!createAtEvent || !firstCommitEvent) return 0
const duration = createAtEvent.date.getTime() - firstCommitEvent.date.getTime()
if (duration <= 0 || isNaN(duration)) return 0
return duration
}
export const GetTimeSpendOnBranchBeforePRMerged = (pullRequest: IPullRequest) => {
const eventTimeline = GenerateEventTimeline(pullRequest)
const mergedAtEvent = eventTimeline.find((event) => event.type === 'mergedAt')
const firstCommitEvent = eventTimeline.find((event) => event.type === 'commit')
if (mergedAtEvent && firstCommitEvent && mergedAtEvent.date.getTime() > firstCommitEvent.date.getTime()) {
return mergedAtEvent.date.getTime() - firstCommitEvent.date.getTime()
}
return -1
}
export const GetTimeToMergeAfterLastReview = (pullRequest: IPullRequest) => {
const eventTimeline = GenerateEventTimeline(pullRequest)
const mergedAtEvent = eventTimeline.find((event) => event.type === 'mergedAt')
const reviewEvents = eventTimeline.filter((event) => event.type === 'review')
if (reviewEvents.length <= 0) {
return -1
}
const lastReviewEvent = reviewEvents.reverse()[0]
if (mergedAtEvent && lastReviewEvent && mergedAtEvent.date.getTime() > lastReviewEvent.date.getTime()) {
return mergedAtEvent.date.getTime() - lastReviewEvent.date.getTime()
}
return -1
}
export const GetTotalRuntimeForLastStatusCheckRun = (pullRequest: IPullRequest) => {
const eventTimeline = GenerateEventTimeline(pullRequest)
const statusCheckEvents = eventTimeline
.filter((event) => event.type === 'statusCheck')
.map((event) => event.event_instance as StatusCheck)
.filter((statusCheck) => statusCheck.status == 'COMPLETED')
if (statusCheckEvents.length <= 0) {
return 0
}
let totalTime = 0
statusCheckEvents.forEach((statusCheck) => {
totalTime += new Date( | statusCheck.completedAt).getTime() - new Date(statusCheck.startedAt).getTime()
})
return totalTime
} |
export const GetTimeSpendInPrForLastStatusCheckRun = (pullRequest: IPullRequest) => {
const eventTimeline = GenerateEventTimeline(pullRequest)
const statusCheckEvents = eventTimeline
.filter((event) => event.type === 'statusCheck')
.map((event) => event.event_instance as StatusCheck)
.filter((statusCheck) => statusCheck.status == 'COMPLETED')
if (statusCheckEvents.length <= 0) {
return 0
}
let earliestStart = new Date()
let latestCompletion = new Date(0, 0, 0)
statusCheckEvents.forEach((statusCheckEvent) => {
const completedDate = new Date(statusCheckEvent.completedAt)
const startedDate = new Date(statusCheckEvent.startedAt)
if (startedDate < earliestStart) {
earliestStart = startedDate
}
if (completedDate > latestCompletion) {
latestCompletion = completedDate
}
})
return latestCompletion.getTime() - earliestStart.getTime()
}
const FilterReviewsByState = (pullRequest: IPullRequest, state: string) => {
const eventTimeline = GenerateEventTimeline(pullRequest)
const reviewEvents = eventTimeline.filter((event) => event.type === 'review')
if (reviewEvents.length <= 0) {
return []
}
const filteredReviews = reviewEvents.filter((reviewEvent) => {
const review = reviewEvent.event_instance as IPullRequestReview
return review.state === state
})
return filteredReviews
}
export const GetNumberOfCommentOnlyReviews = (pullRequest: IPullRequest) => {
return FilterReviewsByState(pullRequest, 'COMMENTED').length
}
export const GetNumberOfRequestedChangeReviews = (pullRequest: IPullRequest) => {
return FilterReviewsByState(pullRequest, 'CHANGES_REQUESTED').length
}
export const GetNumberOfApprovedReviews = (pullRequest: IPullRequest) => {
return FilterReviewsByState(pullRequest, 'APPROVED').length
}
export const GetUniqueReviewParticipants = (pullRequest: IPullRequest) => {
const eventTimeline = GenerateEventTimeline(pullRequest)
const reviewEvents = eventTimeline.filter((event) => event.type === 'review')
// extract unique reviewers from review events
return reviewEvents
.map((reviewEvent) => reviewEvent.event_instance as IPullRequestReview)
.map((review) => review.authorLogin)
.filter((value, index, self) => self.indexOf(value) === index)
}
export const GetUniqueCommentParticipants = (pullRequest: IPullRequest) => {
const eventTimeline = GenerateEventTimeline(pullRequest)
const commentEvents = eventTimeline.filter((event) => event.type === 'comment')
// extract unique commenter from review events
return commentEvents
.map((commentEvent) => commentEvent.event_instance as IPullRequestComment)
.map((comment) => comment.authorLogin)
.filter((value, index, self) => self.indexOf(value) === index)
}
export const GetUniqueCommitterParticipants = (pullRequest: IPullRequest) => {
const eventTimeline = GenerateEventTimeline(pullRequest)
const commitEvents = eventTimeline.filter((event) => event.type === 'commit')
// extract unique reviewers from review events
return commitEvents
.map((commitEvent) => commitEvent.event_instance as IPullRequestCommit)
.map((commit) => commit.authors.filter((author) => author.login !== null).map((author) => author.login))
.flat()
.filter((value, index, self) => self.indexOf(value) === index)
}
export const GetNumberOfActivePullRequestReviewParticipants = (pullRequest: IPullRequest) => {
const uniqueReviewers = GetUniqueReviewParticipants(pullRequest)
const uniqueCommenter = GetUniqueCommentParticipants(pullRequest)
return uniqueReviewers.concat(uniqueCommenter).filter((value, index, self) => self.indexOf(value) === index).length
}
export const GetNumberOfPullRequestCommitter = (pullRequest: IPullRequest) => {
return GetUniqueCommitterParticipants(pullRequest).length
}
export const GetTotalNumberOfParticipants = (pullRequest: IPullRequest) => {
return GetNumberOfActivePullRequestReviewParticipants(pullRequest) + GetNumberOfPullRequestCommitter(pullRequest)
}
| src/Report.Calculation.ts | philips-software-pull-request-report-action-3390d78 | [
{
"filename": "src/PullRequest.Definitions.ts",
"retrieved_chunk": " statusCheck.completedAt = jsonObject['completedAt']\n statusCheck.conclusion = jsonObject['conclusion']\n statusCheck.status = jsonObject['status']\n statusCheck.name = jsonObject['name']\n return statusCheck\n }\n}\nexport class PullRequest implements IPullRequest {\n public id = 0\n public title = ''",
"score": 28.385886758690734
},
{
"filename": "src/PullRequest.Definitions.ts",
"retrieved_chunk": " startedAt: string\n completedAt: string\n conclusion: string\n status: string\n name: string\n detailsUrl: string\n }\n const statusCheck = new StatusCheck()\n statusCheck.workflowName = jsonObject['workflowName']\n statusCheck.startedAt = jsonObject['startedAt']",
"score": 26.51494471409145
},
{
"filename": "src/PullRequest.Definitions.ts",
"retrieved_chunk": " PullRequestComment.CreateFromJson(comment),\n )\n pr.statusChecks = ParseArrayOfType<IStatusCheck>(cliPullRequestObject['statusCheckRollup'], (statusCheck) =>\n StatusCheck.CreateFromJson(statusCheck),\n )\n pr.fileChangeSummary = FileChangeSummary.CreateFromJson(cliPullRequestObject)\n return pr\n }\n}",
"score": 18.106832844962124
},
{
"filename": "src/Interfaces/ReportTypes.ts",
"retrieved_chunk": " Entries: IReportConfigurationEntry[]\n}\nexport interface EventWithTime {\n type: string\n date: Date\n time: number\n event_instance: unknown\n}",
"score": 10.842392118250268
},
{
"filename": "src/Report.Generation.ts",
"retrieved_chunk": " const table = this.GenerateMeasureTable(pr, report)\n const reportElements = [header, ...table]\n return tsMarkdown(reportElements)\n }\n public GenerateHeader(pr: IPullRequest, report: IReport): H1Entry {\n const title = { h1: `${report.Description} (#${pr.id})` }\n return title\n }\n public GetMeasurementEntries(entries: IReportConfigurationEntry[]): IReportConfigurationEntry[] {\n if (entries !== undefined && entries !== null && entries.length > 0) {",
"score": 10.029686114226207
}
] | typescript | statusCheck.completedAt).getTime() - new Date(statusCheck.startedAt).getTime()
})
return totalTime
} |
// for license and copyright look at the repository
import { IReport, IReportConfigurationEntry, IReportConfigInfo, PullRequestCallback } from './Interfaces/ReportTypes'
export enum ConfigurationCategory {
None,
StaticMeasures,
TimeRelatedMeasures,
StatusCheckRelatedMeasures,
ReportGeneratorValue,
}
export const ConfigurationCategoryTitleMap = new Map<ConfigurationCategory, string>([
[ConfigurationCategory.None, 'None'],
[ConfigurationCategory.StaticMeasures, 'Static measures'],
[ConfigurationCategory.TimeRelatedMeasures, 'Time related measures'],
[ConfigurationCategory.StatusCheckRelatedMeasures, 'Status check related measures'],
[ConfigurationCategory.ReportGeneratorValue, 'Report generator related predefined strings'],
])
export class ConfigurationInfo implements IReportConfigInfo {
public Description
public PresentationValue
public Value
public ConfigurationName
public ConfigValue
public ConfigurationCategory
constructor(
label: string,
presentationValue: string | number,
value: string | number,
configName: string,
defaultConfigValue: string | number,
configurationCategory: ConfigurationCategory,
) {
this.Description = label
this.PresentationValue = presentationValue
this.Value = value
this.ConfigurationName = configName
this.ConfigValue = defaultConfigValue
this.ConfigurationCategory = configurationCategory
}
}
export class ReportConfigurationEntry implements IReportConfigurationEntry {
public Id
public Info
| public PullRequestCallback: PullRequestCallback
constructor(id = '', info: IReportConfigInfo, measureCallback: PullRequestCallback = () => '') { |
this.Id = id
this.Info = info
this.PullRequestCallback = measureCallback
}
}
export class Report implements IReport {
public Id = ''
public Description = ''
public Entries: ReportConfigurationEntry[] = []
}
| src/Report.Definitions.ts | philips-software-pull-request-report-action-3390d78 | [
{
"filename": "src/Interfaces/ReportTypes.ts",
"retrieved_chunk": " ConfigValue: string | number\n}\nexport interface IReportConfigurationEntry {\n Id: string\n Info: IReportConfigInfo\n PullRequestCallback: PullRequestCallback\n}\nexport interface IReport {\n Id: string\n Description: string",
"score": 40.44542370608348
},
{
"filename": "src/Report.Generation.ts",
"retrieved_chunk": " const categoryEntries = entries.filter((entry) => entry.Info.ConfigurationCategory === measureCategory)\n categoryEntries.forEach((entry) => {\n entry.Info.Value = entry.PullRequestCallback(pr)\n })\n const rows = categoryEntries.map((entry) => ({\n Description: entry.Info.Description,\n Value: entry.Info.Value,\n }))\n return table({\n columns: [{ name: this.DescriptionHeaderLabel }, { name: this.ValueHeaderLabel }],",
"score": 26.959267778118704
},
{
"filename": "src/Report.Generation.ts",
"retrieved_chunk": " return entries.filter((entry) => ConfigurationCategory[entry.Info.ConfigurationCategory].endsWith('Measures'))\n }\n return []\n }\n public GenerateMeasureTable(pr: IPullRequest, report: IReport): MarkdownEntry[] {\n const tables: MarkdownEntry[] = []\n const entries = this.GetMeasurementEntries(report.Entries)\n const categories = new Set(entries.map((entry) => entry.Info.ConfigurationCategory))\n categories.forEach((category) => {\n tables.push(this.GenerateCategoryTitle(category))",
"score": 21.1908498927339
},
{
"filename": "src/Report.Generation.ts",
"retrieved_chunk": " const table = this.GenerateMeasureTable(pr, report)\n const reportElements = [header, ...table]\n return tsMarkdown(reportElements)\n }\n public GenerateHeader(pr: IPullRequest, report: IReport): H1Entry {\n const title = { h1: `${report.Description} (#${pr.id})` }\n return title\n }\n public GetMeasurementEntries(entries: IReportConfigurationEntry[]): IReportConfigurationEntry[] {\n if (entries !== undefined && entries !== null && entries.length > 0) {",
"score": 20.2353225732311
},
{
"filename": "src/PullRequest.Definitions.ts",
"retrieved_chunk": "export class PullRequestComment implements IPullRequestComment {\n public authorLogin = ''\n public createdAt = ''\n public body = ''\n public authorAssociation = ''\n public id = ''\n public url = ''\n public viewerDidAuthor = false\n public static CreateFromJson(json: unknown): IPullRequestComment {\n const jsonObject = json as {",
"score": 19.268140362623303
}
] | typescript | public PullRequestCallback: PullRequestCallback
constructor(id = '', info: IReportConfigInfo, measureCallback: PullRequestCallback = () => '') { |
/**
* @swagger
* components:
* schemas:
* User:
* type: object
* required:
* - name
* - email
* properties:
* name:
* type: string
* description: The user name
* email:
* type: string
* format: email
* description: The user email address
* password:
* type: string
* description: The user password (hashed)
* role:
* type: string
* enum: [user, admin]
* description: The user role
* default: user
* example:
* name: John Doe
* email: [email protected]
* password: $2a$10$gR06R4K1NM4p4b4ELq.LlOTzq3Dcxj2iPwE5U/O2MDE70o9noemhO
* role: user
*/
import express from 'express';
import { deleteUser, fetchUsers } from '../service';
import { protect, restrictTo } from '../../../middleware';
const router = express.Router();
/**
* @swagger
* /api/v1/users:
* get:
* summary: Retrieve all users
* tags: [User]
* security:
* - bearerAuth: []
* responses:
* "200":
* description: A list of users
* content:
* application/json:
* schema:
* type: array
* items:
* $ref: '#/components/schemas/User'
* "401":
* description: Unauthorized
*/
router. | get('/', protect, restrictTo('admin'), fetchUsers); |
/**
* @swagger
* /api/v1/users/{id}:
* delete:
* summary: Delete a user by ID
* tags: [User]
* security:
* - bearerAuth: []
* parameters:
* - in: path
* name: id
* schema:
* type: string
* required: true
* description: The ID of the user to delete
* responses:
* "204":
* description: User deleted successfully
* "401":
* description: Unauthorized
* "404":
* description: User not found
*/
// A simple case where users can only delete themselves not the admin
router.delete('/:id', restrictTo('user'), deleteUser);
export default router;
| src/modules/auth/controller/users.ts | walosha-BACKEND_DEV_TESTS-db2fcb4 | [
{
"filename": "src/modules/auth/controller/index.ts",
"retrieved_chunk": " * description: The authenticated user.\n * content:\n * application/json:\n * schema:\n * $ref: '#/components/schemas/User'\n */\nrouter.post('/login', login);\n/**\n * @swagger\n * /api/v1/auth/refresh:",
"score": 23.36940559308816
},
{
"filename": "src/modules/auth/controller/index.ts",
"retrieved_chunk": " * content:\n * application/json:\n * schema:\n * $ref: '#/components/schemas/User'\n */\nrouter.post('/signup', signup);\n/**\n * @swagger\n * /api/v1/auth/login:\n * post:",
"score": 22.615784206928453
},
{
"filename": "src/modules/auth/controller/index.ts",
"retrieved_chunk": " * summary: Login User\n * tags: [Auth]\n * requestBody:\n * required: true\n * content:\n * application/json:\n * schema:\n * $ref: '#/components/schemas/LoginRequest'\n * responses:\n * \"200\":",
"score": 19.57300590680708
},
{
"filename": "src/modules/auth/controller/index.ts",
"retrieved_chunk": " * tags: [Auth]\n * requestBody:\n * required: true\n * content:\n * application/json:\n * schema:\n * $ref: '#/components/schemas/SignupRequest'\n * responses:\n * \"200\":\n * description: The created user.",
"score": 19.21657302046459
},
{
"filename": "src/modules/account/controller/index.ts",
"retrieved_chunk": " * description: Invalid request parameters\n * '401':\n * description: Unauthorized request\n */\nrouter.post('/transfer', protect, transferFund);\nexport default router;",
"score": 17.839611483388857
}
] | typescript | get('/', protect, restrictTo('admin'), fetchUsers); |
// for license and copyright look at the repository
import {
IPullRequest,
IPullRequestComment,
IPullRequestCommit,
IPullRequestReview,
} from './Interfaces/PullRequestTypes'
import { EventWithTime } from './Interfaces/ReportTypes'
import { StatusCheck } from './PullRequest.Definitions'
export const GenerateEventTimeline = (pullRequest: IPullRequest): EventWithTime[] => {
const events: EventWithTime[][] = []
// merge all interesting events into a single list
events.push([
{ type: 'createAt', date: new Date(pullRequest.createdAt), event_instance: pullRequest.createdAt, time: 0 },
])
events.push(
pullRequest.commits.map((commit) => ({
type: 'commit',
date: new Date(commit.authorDate),
event_instance: commit,
time: 0,
})),
)
events.push(
pullRequest.reviews.map((review) => ({
type: 'review',
date: new Date(review.submittedAt),
event_instance: review,
time: 0,
})),
)
events.push(
pullRequest.statusChecks.map((statusCheck) => ({
type: 'statusCheck',
date: new Date(statusCheck.completedAt),
event_instance: statusCheck,
time: 0,
})),
)
events.push(
pullRequest.comments.map((comment) => ({
type: 'comment',
date: new Date(comment.createdAt),
event_instance: comment,
time: 0,
})),
)
events.push([
{ type: 'mergedAt', date: new Date(pullRequest.mergedAt), event_instance: pullRequest.mergedAt, time: 0 },
])
events.push([
{ type: 'closedAt', date: new Date(pullRequest.closedAt), event_instance: pullRequest.closedAt, time: 0 },
])
// flatten the list
const flattenedEvents = events.flat()
// filter out events that don't have a valid date
const filteredEvents = flattenedEvents.filter((event) => event.date !== null)
// sort the events by date
filteredEvents.sort((a, b) => a.date.getTime() - b.date.getTime())
// now, create a list of events with the time between events
const eventsWithTime: EventWithTime[] = []
// calculate the time between events
for (let i = 0; i < filteredEvents.length; i++) {
if (i === 0) {
eventsWithTime.push({
type: filteredEvents[i].type,
date: filteredEvents[i].date,
time: 0,
event_instance: filteredEvents[i].event_instance,
})
} else {
eventsWithTime.push({
type: filteredEvents[i].type,
date: filteredEvents[i].date,
time: (filteredEvents[i].date.getTime() - filteredEvents[i - 1].date.getTime()) / 1000,
event_instance: filteredEvents[i].event_instance,
})
}
}
return eventsWithTime
}
export const MillisecondsToReadableDuration = (leadTimeInMSec: number) => {
const seconds = +(leadTimeInMSec / 1000).toFixed(1)
const minutes = +(leadTimeInMSec / (1000 * 60)).toFixed(1)
const hours = +(leadTimeInMSec / (1000 * 60 * 60)).toFixed(1)
const days = +(leadTimeInMSec / (1000 * 60 * 60 * 24)).toFixed(1)
if (seconds < 60) return `${seconds} Sec`
else if (minutes < 60) return `${minutes} Min`
else if (hours < 24) return `${hours} Hours`
else return `${days} Days`
}
export const GetMergedOrClosedDate = (pullRequest: IPullRequest): string => {
let mergedOrClosedAt = pullRequest.mergedAt
if (mergedOrClosedAt == null) mergedOrClosedAt = pullRequest.closedAt
return mergedOrClosedAt
}
export const GetLeadTimeForPullRequest = (pullRequest: IPullRequest) => {
// parse createAt as date from string
const createAt = new Date(pullRequest.createdAt)
const mergedOrClosedAt = new Date(GetMergedOrClosedDate(pullRequest))
const duration = mergedOrClosedAt.getTime() - createAt.getTime()
if (duration <= 0 || isNaN(duration)) return 0
return duration
}
export const GetTimeSpendOnBranchBeforePRCreated = (pullRequest: IPullRequest) => {
const eventTimeline = GenerateEventTimeline(pullRequest)
const createAtEvent = eventTimeline.find((event) => event.type === 'createAt')
const firstCommitEvent = eventTimeline.find((event) => event.type === 'commit')
if (!createAtEvent || !firstCommitEvent) return 0
const duration = createAtEvent.date.getTime() - firstCommitEvent.date.getTime()
if (duration <= 0 || isNaN(duration)) return 0
return duration
}
export const GetTimeSpendOnBranchBeforePRMerged = (pullRequest: IPullRequest) => {
const eventTimeline = GenerateEventTimeline(pullRequest)
const mergedAtEvent = eventTimeline.find((event) => event.type === 'mergedAt')
const firstCommitEvent = eventTimeline.find((event) => event.type === 'commit')
if (mergedAtEvent && firstCommitEvent && mergedAtEvent.date.getTime() > firstCommitEvent.date.getTime()) {
return mergedAtEvent.date.getTime() - firstCommitEvent.date.getTime()
}
return -1
}
export const GetTimeToMergeAfterLastReview = (pullRequest: IPullRequest) => {
const eventTimeline = GenerateEventTimeline(pullRequest)
const mergedAtEvent = eventTimeline.find((event) => event.type === 'mergedAt')
const reviewEvents = eventTimeline.filter((event) => event.type === 'review')
if (reviewEvents.length <= 0) {
return -1
}
const lastReviewEvent = reviewEvents.reverse()[0]
if (mergedAtEvent && lastReviewEvent && mergedAtEvent.date.getTime() > lastReviewEvent.date.getTime()) {
return mergedAtEvent.date.getTime() - lastReviewEvent.date.getTime()
}
return -1
}
export const GetTotalRuntimeForLastStatusCheckRun = (pullRequest: IPullRequest) => {
const eventTimeline = GenerateEventTimeline(pullRequest)
const statusCheckEvents = eventTimeline
.filter((event) => event.type === 'statusCheck')
.map((event) => event.event_instance as StatusCheck)
.filter((statusCheck) => | statusCheck.status == 'COMPLETED')
if (statusCheckEvents.length <= 0) { |
return 0
}
let totalTime = 0
statusCheckEvents.forEach((statusCheck) => {
totalTime += new Date(statusCheck.completedAt).getTime() - new Date(statusCheck.startedAt).getTime()
})
return totalTime
}
export const GetTimeSpendInPrForLastStatusCheckRun = (pullRequest: IPullRequest) => {
const eventTimeline = GenerateEventTimeline(pullRequest)
const statusCheckEvents = eventTimeline
.filter((event) => event.type === 'statusCheck')
.map((event) => event.event_instance as StatusCheck)
.filter((statusCheck) => statusCheck.status == 'COMPLETED')
if (statusCheckEvents.length <= 0) {
return 0
}
let earliestStart = new Date()
let latestCompletion = new Date(0, 0, 0)
statusCheckEvents.forEach((statusCheckEvent) => {
const completedDate = new Date(statusCheckEvent.completedAt)
const startedDate = new Date(statusCheckEvent.startedAt)
if (startedDate < earliestStart) {
earliestStart = startedDate
}
if (completedDate > latestCompletion) {
latestCompletion = completedDate
}
})
return latestCompletion.getTime() - earliestStart.getTime()
}
const FilterReviewsByState = (pullRequest: IPullRequest, state: string) => {
const eventTimeline = GenerateEventTimeline(pullRequest)
const reviewEvents = eventTimeline.filter((event) => event.type === 'review')
if (reviewEvents.length <= 0) {
return []
}
const filteredReviews = reviewEvents.filter((reviewEvent) => {
const review = reviewEvent.event_instance as IPullRequestReview
return review.state === state
})
return filteredReviews
}
export const GetNumberOfCommentOnlyReviews = (pullRequest: IPullRequest) => {
return FilterReviewsByState(pullRequest, 'COMMENTED').length
}
export const GetNumberOfRequestedChangeReviews = (pullRequest: IPullRequest) => {
return FilterReviewsByState(pullRequest, 'CHANGES_REQUESTED').length
}
export const GetNumberOfApprovedReviews = (pullRequest: IPullRequest) => {
return FilterReviewsByState(pullRequest, 'APPROVED').length
}
export const GetUniqueReviewParticipants = (pullRequest: IPullRequest) => {
const eventTimeline = GenerateEventTimeline(pullRequest)
const reviewEvents = eventTimeline.filter((event) => event.type === 'review')
// extract unique reviewers from review events
return reviewEvents
.map((reviewEvent) => reviewEvent.event_instance as IPullRequestReview)
.map((review) => review.authorLogin)
.filter((value, index, self) => self.indexOf(value) === index)
}
export const GetUniqueCommentParticipants = (pullRequest: IPullRequest) => {
const eventTimeline = GenerateEventTimeline(pullRequest)
const commentEvents = eventTimeline.filter((event) => event.type === 'comment')
// extract unique commenter from review events
return commentEvents
.map((commentEvent) => commentEvent.event_instance as IPullRequestComment)
.map((comment) => comment.authorLogin)
.filter((value, index, self) => self.indexOf(value) === index)
}
export const GetUniqueCommitterParticipants = (pullRequest: IPullRequest) => {
const eventTimeline = GenerateEventTimeline(pullRequest)
const commitEvents = eventTimeline.filter((event) => event.type === 'commit')
// extract unique reviewers from review events
return commitEvents
.map((commitEvent) => commitEvent.event_instance as IPullRequestCommit)
.map((commit) => commit.authors.filter((author) => author.login !== null).map((author) => author.login))
.flat()
.filter((value, index, self) => self.indexOf(value) === index)
}
export const GetNumberOfActivePullRequestReviewParticipants = (pullRequest: IPullRequest) => {
const uniqueReviewers = GetUniqueReviewParticipants(pullRequest)
const uniqueCommenter = GetUniqueCommentParticipants(pullRequest)
return uniqueReviewers.concat(uniqueCommenter).filter((value, index, self) => self.indexOf(value) === index).length
}
export const GetNumberOfPullRequestCommitter = (pullRequest: IPullRequest) => {
return GetUniqueCommitterParticipants(pullRequest).length
}
export const GetTotalNumberOfParticipants = (pullRequest: IPullRequest) => {
return GetNumberOfActivePullRequestReviewParticipants(pullRequest) + GetNumberOfPullRequestCommitter(pullRequest)
}
| src/Report.Calculation.ts | philips-software-pull-request-report-action-3390d78 | [
{
"filename": "src/PullRequest.Definitions.ts",
"retrieved_chunk": " statusCheck.completedAt = jsonObject['completedAt']\n statusCheck.conclusion = jsonObject['conclusion']\n statusCheck.status = jsonObject['status']\n statusCheck.name = jsonObject['name']\n return statusCheck\n }\n}\nexport class PullRequest implements IPullRequest {\n public id = 0\n public title = ''",
"score": 27.158476126213262
},
{
"filename": "src/PullRequest.Definitions.ts",
"retrieved_chunk": " startedAt: string\n completedAt: string\n conclusion: string\n status: string\n name: string\n detailsUrl: string\n }\n const statusCheck = new StatusCheck()\n statusCheck.workflowName = jsonObject['workflowName']\n statusCheck.startedAt = jsonObject['startedAt']",
"score": 25.117596584557983
},
{
"filename": "src/PullRequest.Definitions.ts",
"retrieved_chunk": " PullRequestComment.CreateFromJson(comment),\n )\n pr.statusChecks = ParseArrayOfType<IStatusCheck>(cliPullRequestObject['statusCheckRollup'], (statusCheck) =>\n StatusCheck.CreateFromJson(statusCheck),\n )\n pr.fileChangeSummary = FileChangeSummary.CreateFromJson(cliPullRequestObject)\n return pr\n }\n}",
"score": 20.438782652221818
},
{
"filename": "src/Report.Functions.ts",
"retrieved_chunk": "}\nexport const GetCommitsCount = (pr: IPullRequest): number => {\n return pr.fileChangeSummary.commits\n}\nexport const GetReviewCount = (pr: IPullRequest): number => {\n return pr.reviews.length\n}\nexport const GetCommentCount = (pr: IPullRequest): number => {\n return pr.comments.length\n}",
"score": 13.76414349350559
},
{
"filename": "src/Report.Generation.ts",
"retrieved_chunk": " return entries.filter((entry) => ConfigurationCategory[entry.Info.ConfigurationCategory].endsWith('Measures'))\n }\n return []\n }\n public GenerateMeasureTable(pr: IPullRequest, report: IReport): MarkdownEntry[] {\n const tables: MarkdownEntry[] = []\n const entries = this.GetMeasurementEntries(report.Entries)\n const categories = new Set(entries.map((entry) => entry.Info.ConfigurationCategory))\n categories.forEach((category) => {\n tables.push(this.GenerateCategoryTitle(category))",
"score": 13.515675687793419
}
] | typescript | statusCheck.status == 'COMPLETED')
if (statusCheckEvents.length <= 0) { |
// for license and copyright look at the repository
import { IPullRequest } from './Interfaces/PullRequestTypes'
import { IReport, IReportConfigurationEntry } from './Interfaces/ReportTypes'
import { tsMarkdown, table, TableEntry, H1Entry, H3Entry, MarkdownEntry } from 'ts-markdown'
import { ConfigurationCategory, ConfigurationCategoryTitleMap } from './Report.Definitions'
export class ReportGenerator {
DescriptionHeaderLabel = 'Description'
ValueHeaderLabel = 'Value'
public Generate(pr: IPullRequest, report: IReport): string {
const header = this.GenerateHeader(pr, report)
const table = this.GenerateMeasureTable(pr, report)
const reportElements = [header, ...table]
return tsMarkdown(reportElements)
}
public GenerateHeader(pr: IPullRequest, report: IReport): H1Entry {
const title = { h1: `${report.Description} (#${pr.id})` }
return title
}
public GetMeasurementEntries(entries: IReportConfigurationEntry[]): IReportConfigurationEntry[] {
if (entries !== undefined && entries !== null && entries.length > 0) {
return entries.filter((entry) => ConfigurationCategory[entry.Info.ConfigurationCategory].endsWith('Measures'))
}
return []
}
public GenerateMeasureTable(pr: IPullRequest, report: IReport): MarkdownEntry[] {
const tables: MarkdownEntry[] = []
const entries = this.GetMeasurementEntries(report.Entries)
const categories = new Set(entries.map((entry) => entry.Info.ConfigurationCategory))
categories.forEach((category) => {
tables.push(this.GenerateCategoryTitle(category))
tables.push(this.GenerateCategoryTable(pr, report, category))
})
return tables
}
private GenerateCategoryTitle(measureCategory: ConfigurationCategory): H3Entry {
const title = { h3: `${ConfigurationCategoryTitleMap.get(measureCategory) || 'No category'}` }
return title
}
private GenerateCategoryTable(pr: IPullRequest, report: IReport, measureCategory: ConfigurationCategory): TableEntry {
const entries = this.GetMeasurementEntries(report.Entries)
const categoryEntries = entries.filter((entry) => entry.Info.ConfigurationCategory === measureCategory)
categoryEntries.forEach((entry) => {
entry | .Info.Value = entry.PullRequestCallback(pr)
})
const rows = categoryEntries.map((entry) => ({ |
Description: entry.Info.Description,
Value: entry.Info.Value,
}))
return table({
columns: [{ name: this.DescriptionHeaderLabel }, { name: this.ValueHeaderLabel }],
rows: rows,
})
}
}
| src/Report.Generation.ts | philips-software-pull-request-report-action-3390d78 | [
{
"filename": "src/Report.Measures.ts",
"retrieved_chunk": " // get the property value of inputs\n entry.Info.ConfigValue = (configValues as { [key: string]: string | number })[entry.Info.ConfigurationName]\n })\n return measurementEntries\n}\nexport const GetActiveMeasures = (entries: Array<ReportConfigurationEntry>): Array<ReportConfigurationEntry> => {\n return entries.filter((entry) => entry.Info.ConfigValue === 'yes')\n}\nexport const ReportConfigurationTable = new Array<ReportConfigurationEntry>()\nReportConfigurationTable.push(",
"score": 62.35242390620066
},
{
"filename": "src/Report.Measures.ts",
"retrieved_chunk": " GetReviewCount,\n GetCommentCount,\n} from './Report.Functions'\nimport { ConfigurationInputs } from './action.config.type'\nexport const UpdateConfigValues = (\n configValues: ConfigurationInputs,\n measurementEntries: Array<ReportConfigurationEntry>,\n): Array<ReportConfigurationEntry> => {\n // Update measurementEntries with config values from inputs\n measurementEntries.forEach((entry) => {",
"score": 26.526864098212908
},
{
"filename": "src/Report.Definitions.ts",
"retrieved_chunk": " public Id\n public Info\n public PullRequestCallback: PullRequestCallback\n constructor(id = '', info: IReportConfigInfo, measureCallback: PullRequestCallback = () => '') {\n this.Id = id\n this.Info = info\n this.PullRequestCallback = measureCallback\n }\n}\nexport class Report implements IReport {",
"score": 25.471183156582892
},
{
"filename": "src/Interfaces/ReportTypes.ts",
"retrieved_chunk": " ConfigValue: string | number\n}\nexport interface IReportConfigurationEntry {\n Id: string\n Info: IReportConfigInfo\n PullRequestCallback: PullRequestCallback\n}\nexport interface IReport {\n Id: string\n Description: string",
"score": 16.823084458084256
},
{
"filename": "src/run.ts",
"retrieved_chunk": " report.Entries = activeConfigValues\n report.Description = 'Test report'\n report.Id = pullRequestDataModel.id.toString()\n return report\n}\nconst IsConfigValueYes = (configValue: string): boolean => {\n return configValue.trim().toLowerCase() === 'yes'\n}\nexport const run = async (inputsFromWorkflow: ConfigurationInputs): Promise<number> => {\n // take care that action is running only in PR context",
"score": 15.231306869881621
}
] | typescript | .Info.Value = entry.PullRequestCallback(pr)
})
const rows = categoryEntries.map((entry) => ({ |
// for license and copyright look at the repository
import { IReport, IReportConfigurationEntry, IReportConfigInfo, PullRequestCallback } from './Interfaces/ReportTypes'
export enum ConfigurationCategory {
None,
StaticMeasures,
TimeRelatedMeasures,
StatusCheckRelatedMeasures,
ReportGeneratorValue,
}
export const ConfigurationCategoryTitleMap = new Map<ConfigurationCategory, string>([
[ConfigurationCategory.None, 'None'],
[ConfigurationCategory.StaticMeasures, 'Static measures'],
[ConfigurationCategory.TimeRelatedMeasures, 'Time related measures'],
[ConfigurationCategory.StatusCheckRelatedMeasures, 'Status check related measures'],
[ConfigurationCategory.ReportGeneratorValue, 'Report generator related predefined strings'],
])
export class ConfigurationInfo implements IReportConfigInfo {
public Description
public PresentationValue
public Value
public ConfigurationName
public ConfigValue
public ConfigurationCategory
constructor(
label: string,
presentationValue: string | number,
value: string | number,
configName: string,
defaultConfigValue: string | number,
configurationCategory: ConfigurationCategory,
) {
this.Description = label
this.PresentationValue = presentationValue
this.Value = value
this.ConfigurationName = configName
this.ConfigValue = defaultConfigValue
this.ConfigurationCategory = configurationCategory
}
}
export class ReportConfigurationEntry implements IReportConfigurationEntry {
public Id
public Info
public | PullRequestCallback: PullRequestCallback
constructor(id = '', info: IReportConfigInfo, measureCallback: PullRequestCallback = () => '') { |
this.Id = id
this.Info = info
this.PullRequestCallback = measureCallback
}
}
export class Report implements IReport {
public Id = ''
public Description = ''
public Entries: ReportConfigurationEntry[] = []
}
| src/Report.Definitions.ts | philips-software-pull-request-report-action-3390d78 | [
{
"filename": "src/Interfaces/ReportTypes.ts",
"retrieved_chunk": " ConfigValue: string | number\n}\nexport interface IReportConfigurationEntry {\n Id: string\n Info: IReportConfigInfo\n PullRequestCallback: PullRequestCallback\n}\nexport interface IReport {\n Id: string\n Description: string",
"score": 40.44542370608348
},
{
"filename": "src/Report.Generation.ts",
"retrieved_chunk": " const categoryEntries = entries.filter((entry) => entry.Info.ConfigurationCategory === measureCategory)\n categoryEntries.forEach((entry) => {\n entry.Info.Value = entry.PullRequestCallback(pr)\n })\n const rows = categoryEntries.map((entry) => ({\n Description: entry.Info.Description,\n Value: entry.Info.Value,\n }))\n return table({\n columns: [{ name: this.DescriptionHeaderLabel }, { name: this.ValueHeaderLabel }],",
"score": 23.126672626588142
},
{
"filename": "src/PullRequest.Definitions.ts",
"retrieved_chunk": "export class PullRequestComment implements IPullRequestComment {\n public authorLogin = ''\n public createdAt = ''\n public body = ''\n public authorAssociation = ''\n public id = ''\n public url = ''\n public viewerDidAuthor = false\n public static CreateFromJson(json: unknown): IPullRequestComment {\n const jsonObject = json as {",
"score": 19.268140362623303
},
{
"filename": "src/PullRequest.Definitions.ts",
"retrieved_chunk": "}\nexport class PullRequestCommit implements IPullRequestCommit {\n public authors: ICommitAuthor[] = []\n public committer = ''\n public authorDate = ''\n public commitDate = ''\n public commitHeader = ''\n public commitBody = ''\n public commitId = ''\n public static CreateFromJson(json: unknown): IPullRequestCommit {",
"score": 17.82351991410465
},
{
"filename": "src/Report.Generation.ts",
"retrieved_chunk": " const table = this.GenerateMeasureTable(pr, report)\n const reportElements = [header, ...table]\n return tsMarkdown(reportElements)\n }\n public GenerateHeader(pr: IPullRequest, report: IReport): H1Entry {\n const title = { h1: `${report.Description} (#${pr.id})` }\n return title\n }\n public GetMeasurementEntries(entries: IReportConfigurationEntry[]): IReportConfigurationEntry[] {\n if (entries !== undefined && entries !== null && entries.length > 0) {",
"score": 17.715938858062074
}
] | typescript | PullRequestCallback: PullRequestCallback
constructor(id = '', info: IReportConfigInfo, measureCallback: PullRequestCallback = () => '') { |
// for license and copyright look at the repository
import {
IPullRequest,
IPullRequestComment,
IPullRequestCommit,
IPullRequestReview,
} from './Interfaces/PullRequestTypes'
import { EventWithTime } from './Interfaces/ReportTypes'
import { StatusCheck } from './PullRequest.Definitions'
export const GenerateEventTimeline = (pullRequest: IPullRequest): EventWithTime[] => {
const events: EventWithTime[][] = []
// merge all interesting events into a single list
events.push([
{ type: 'createAt', date: new Date(pullRequest.createdAt), event_instance: pullRequest.createdAt, time: 0 },
])
events.push(
pullRequest.commits.map((commit) => ({
type: 'commit',
date: new Date(commit.authorDate),
event_instance: commit,
time: 0,
})),
)
events.push(
pullRequest.reviews.map((review) => ({
type: 'review',
date: new Date(review.submittedAt),
event_instance: review,
time: 0,
})),
)
events.push(
pullRequest.statusChecks.map((statusCheck) => ({
type: 'statusCheck',
date: new Date(statusCheck.completedAt),
event_instance: statusCheck,
time: 0,
})),
)
events.push(
pullRequest.comments.map((comment) => ({
type: 'comment',
date: new Date(comment.createdAt),
event_instance: comment,
time: 0,
})),
)
events.push([
{ type: 'mergedAt', date: new Date(pullRequest.mergedAt), event_instance: pullRequest.mergedAt, time: 0 },
])
events.push([
{ type: 'closedAt', date: new Date(pullRequest.closedAt), event_instance: pullRequest.closedAt, time: 0 },
])
// flatten the list
const flattenedEvents = events.flat()
// filter out events that don't have a valid date
const filteredEvents = flattenedEvents.filter((event) => event.date !== null)
// sort the events by date
filteredEvents.sort((a, b) => a.date.getTime() - b.date.getTime())
// now, create a list of events with the time between events
const eventsWithTime: EventWithTime[] = []
// calculate the time between events
for (let i = 0; i < filteredEvents.length; i++) {
if (i === 0) {
eventsWithTime.push({
type: filteredEvents[i].type,
date: filteredEvents[i].date,
time: 0,
event_instance: filteredEvents[i].event_instance,
})
} else {
eventsWithTime.push({
type: filteredEvents[i].type,
date: filteredEvents[i].date,
time: (filteredEvents[i].date.getTime() - filteredEvents[i - 1].date.getTime()) / 1000,
event_instance: filteredEvents[i].event_instance,
})
}
}
return eventsWithTime
}
export const MillisecondsToReadableDuration = (leadTimeInMSec: number) => {
const seconds = +(leadTimeInMSec / 1000).toFixed(1)
const minutes = +(leadTimeInMSec / (1000 * 60)).toFixed(1)
const hours = +(leadTimeInMSec / (1000 * 60 * 60)).toFixed(1)
const days = +(leadTimeInMSec / (1000 * 60 * 60 * 24)).toFixed(1)
if (seconds < 60) return `${seconds} Sec`
else if (minutes < 60) return `${minutes} Min`
else if (hours < 24) return `${hours} Hours`
else return `${days} Days`
}
export const GetMergedOrClosedDate = (pullRequest: IPullRequest): string => {
let mergedOrClosedAt = pullRequest.mergedAt
if (mergedOrClosedAt == null) mergedOrClosedAt = pullRequest.closedAt
return mergedOrClosedAt
}
export const GetLeadTimeForPullRequest = (pullRequest: IPullRequest) => {
// parse createAt as date from string
const createAt = new Date(pullRequest.createdAt)
const mergedOrClosedAt = new Date(GetMergedOrClosedDate(pullRequest))
const duration = mergedOrClosedAt.getTime() - createAt.getTime()
if (duration <= 0 || isNaN(duration)) return 0
return duration
}
export const GetTimeSpendOnBranchBeforePRCreated = (pullRequest: IPullRequest) => {
const eventTimeline = GenerateEventTimeline(pullRequest)
const createAtEvent = eventTimeline.find((event) => event.type === 'createAt')
const firstCommitEvent = eventTimeline.find((event) => event.type === 'commit')
if (!createAtEvent || !firstCommitEvent) return 0
const duration = createAtEvent.date.getTime() - firstCommitEvent.date.getTime()
if (duration <= 0 || isNaN(duration)) return 0
return duration
}
export const GetTimeSpendOnBranchBeforePRMerged = (pullRequest: IPullRequest) => {
const eventTimeline = GenerateEventTimeline(pullRequest)
const mergedAtEvent = eventTimeline.find((event) => event.type === 'mergedAt')
const firstCommitEvent = eventTimeline.find((event) => event.type === 'commit')
if (mergedAtEvent && firstCommitEvent && mergedAtEvent.date.getTime() > firstCommitEvent.date.getTime()) {
return mergedAtEvent.date.getTime() - firstCommitEvent.date.getTime()
}
return -1
}
export const GetTimeToMergeAfterLastReview = (pullRequest: IPullRequest) => {
const eventTimeline = GenerateEventTimeline(pullRequest)
const mergedAtEvent = eventTimeline.find((event) => event.type === 'mergedAt')
const reviewEvents = eventTimeline.filter((event) => event.type === 'review')
if (reviewEvents.length <= 0) {
return -1
}
const lastReviewEvent = reviewEvents.reverse()[0]
if (mergedAtEvent && lastReviewEvent && mergedAtEvent.date.getTime() > lastReviewEvent.date.getTime()) {
return mergedAtEvent.date.getTime() - lastReviewEvent.date.getTime()
}
return -1
}
export const GetTotalRuntimeForLastStatusCheckRun = (pullRequest: IPullRequest) => {
const eventTimeline = GenerateEventTimeline(pullRequest)
const statusCheckEvents = eventTimeline
.filter((event) => event.type === 'statusCheck')
.map((event) => event.event_instance as StatusCheck)
.filter((statusCheck) => statusCheck.status == 'COMPLETED')
if (statusCheckEvents.length <= 0) {
return 0
}
let totalTime = 0
statusCheckEvents.forEach((statusCheck) => {
totalTime | += new Date(statusCheck.completedAt).getTime() - new Date(statusCheck.startedAt).getTime()
})
return totalTime
} |
export const GetTimeSpendInPrForLastStatusCheckRun = (pullRequest: IPullRequest) => {
const eventTimeline = GenerateEventTimeline(pullRequest)
const statusCheckEvents = eventTimeline
.filter((event) => event.type === 'statusCheck')
.map((event) => event.event_instance as StatusCheck)
.filter((statusCheck) => statusCheck.status == 'COMPLETED')
if (statusCheckEvents.length <= 0) {
return 0
}
let earliestStart = new Date()
let latestCompletion = new Date(0, 0, 0)
statusCheckEvents.forEach((statusCheckEvent) => {
const completedDate = new Date(statusCheckEvent.completedAt)
const startedDate = new Date(statusCheckEvent.startedAt)
if (startedDate < earliestStart) {
earliestStart = startedDate
}
if (completedDate > latestCompletion) {
latestCompletion = completedDate
}
})
return latestCompletion.getTime() - earliestStart.getTime()
}
const FilterReviewsByState = (pullRequest: IPullRequest, state: string) => {
const eventTimeline = GenerateEventTimeline(pullRequest)
const reviewEvents = eventTimeline.filter((event) => event.type === 'review')
if (reviewEvents.length <= 0) {
return []
}
const filteredReviews = reviewEvents.filter((reviewEvent) => {
const review = reviewEvent.event_instance as IPullRequestReview
return review.state === state
})
return filteredReviews
}
export const GetNumberOfCommentOnlyReviews = (pullRequest: IPullRequest) => {
return FilterReviewsByState(pullRequest, 'COMMENTED').length
}
export const GetNumberOfRequestedChangeReviews = (pullRequest: IPullRequest) => {
return FilterReviewsByState(pullRequest, 'CHANGES_REQUESTED').length
}
export const GetNumberOfApprovedReviews = (pullRequest: IPullRequest) => {
return FilterReviewsByState(pullRequest, 'APPROVED').length
}
export const GetUniqueReviewParticipants = (pullRequest: IPullRequest) => {
const eventTimeline = GenerateEventTimeline(pullRequest)
const reviewEvents = eventTimeline.filter((event) => event.type === 'review')
// extract unique reviewers from review events
return reviewEvents
.map((reviewEvent) => reviewEvent.event_instance as IPullRequestReview)
.map((review) => review.authorLogin)
.filter((value, index, self) => self.indexOf(value) === index)
}
export const GetUniqueCommentParticipants = (pullRequest: IPullRequest) => {
const eventTimeline = GenerateEventTimeline(pullRequest)
const commentEvents = eventTimeline.filter((event) => event.type === 'comment')
// extract unique commenter from review events
return commentEvents
.map((commentEvent) => commentEvent.event_instance as IPullRequestComment)
.map((comment) => comment.authorLogin)
.filter((value, index, self) => self.indexOf(value) === index)
}
export const GetUniqueCommitterParticipants = (pullRequest: IPullRequest) => {
const eventTimeline = GenerateEventTimeline(pullRequest)
const commitEvents = eventTimeline.filter((event) => event.type === 'commit')
// extract unique reviewers from review events
return commitEvents
.map((commitEvent) => commitEvent.event_instance as IPullRequestCommit)
.map((commit) => commit.authors.filter((author) => author.login !== null).map((author) => author.login))
.flat()
.filter((value, index, self) => self.indexOf(value) === index)
}
export const GetNumberOfActivePullRequestReviewParticipants = (pullRequest: IPullRequest) => {
const uniqueReviewers = GetUniqueReviewParticipants(pullRequest)
const uniqueCommenter = GetUniqueCommentParticipants(pullRequest)
return uniqueReviewers.concat(uniqueCommenter).filter((value, index, self) => self.indexOf(value) === index).length
}
export const GetNumberOfPullRequestCommitter = (pullRequest: IPullRequest) => {
return GetUniqueCommitterParticipants(pullRequest).length
}
export const GetTotalNumberOfParticipants = (pullRequest: IPullRequest) => {
return GetNumberOfActivePullRequestReviewParticipants(pullRequest) + GetNumberOfPullRequestCommitter(pullRequest)
}
| src/Report.Calculation.ts | philips-software-pull-request-report-action-3390d78 | [
{
"filename": "src/PullRequest.Definitions.ts",
"retrieved_chunk": " statusCheck.completedAt = jsonObject['completedAt']\n statusCheck.conclusion = jsonObject['conclusion']\n statusCheck.status = jsonObject['status']\n statusCheck.name = jsonObject['name']\n return statusCheck\n }\n}\nexport class PullRequest implements IPullRequest {\n public id = 0\n public title = ''",
"score": 28.385886758690734
},
{
"filename": "src/PullRequest.Definitions.ts",
"retrieved_chunk": " startedAt: string\n completedAt: string\n conclusion: string\n status: string\n name: string\n detailsUrl: string\n }\n const statusCheck = new StatusCheck()\n statusCheck.workflowName = jsonObject['workflowName']\n statusCheck.startedAt = jsonObject['startedAt']",
"score": 26.51494471409145
},
{
"filename": "src/PullRequest.Definitions.ts",
"retrieved_chunk": " PullRequestComment.CreateFromJson(comment),\n )\n pr.statusChecks = ParseArrayOfType<IStatusCheck>(cliPullRequestObject['statusCheckRollup'], (statusCheck) =>\n StatusCheck.CreateFromJson(statusCheck),\n )\n pr.fileChangeSummary = FileChangeSummary.CreateFromJson(cliPullRequestObject)\n return pr\n }\n}",
"score": 18.106832844962124
},
{
"filename": "src/Interfaces/ReportTypes.ts",
"retrieved_chunk": " Entries: IReportConfigurationEntry[]\n}\nexport interface EventWithTime {\n type: string\n date: Date\n time: number\n event_instance: unknown\n}",
"score": 10.842392118250268
},
{
"filename": "src/Report.Generation.ts",
"retrieved_chunk": " const table = this.GenerateMeasureTable(pr, report)\n const reportElements = [header, ...table]\n return tsMarkdown(reportElements)\n }\n public GenerateHeader(pr: IPullRequest, report: IReport): H1Entry {\n const title = { h1: `${report.Description} (#${pr.id})` }\n return title\n }\n public GetMeasurementEntries(entries: IReportConfigurationEntry[]): IReportConfigurationEntry[] {\n if (entries !== undefined && entries !== null && entries.length > 0) {",
"score": 10.029686114226207
}
] | typescript | += new Date(statusCheck.completedAt).getTime() - new Date(statusCheck.startedAt).getTime()
})
return totalTime
} |
/**
* @swagger
* components:
* schemas:
* User:
* type: object
* required:
* - name
* - email
* properties:
* name:
* type: string
* description: The user name
* email:
* type: string
* format: email
* description: The user email address
* password:
* type: string
* description: The user password (hashed)
* role:
* type: string
* enum: [user, admin]
* description: The user role
* default: user
* example:
* name: John Doe
* email: [email protected]
* password: $2a$10$gR06R4K1NM4p4b4ELq.LlOTzq3Dcxj2iPwE5U/O2MDE70o9noemhO
* role: user
*/
import express from 'express';
import { deleteUser, fetchUsers } from '../service';
import { protect, restrictTo } from '../../../middleware';
const router = express.Router();
/**
* @swagger
* /api/v1/users:
* get:
* summary: Retrieve all users
* tags: [User]
* security:
* - bearerAuth: []
* responses:
* "200":
* description: A list of users
* content:
* application/json:
* schema:
* type: array
* items:
* $ref: '#/components/schemas/User'
* "401":
* description: Unauthorized
*/
router.get | ('/', protect, restrictTo('admin'), fetchUsers); |
/**
* @swagger
* /api/v1/users/{id}:
* delete:
* summary: Delete a user by ID
* tags: [User]
* security:
* - bearerAuth: []
* parameters:
* - in: path
* name: id
* schema:
* type: string
* required: true
* description: The ID of the user to delete
* responses:
* "204":
* description: User deleted successfully
* "401":
* description: Unauthorized
* "404":
* description: User not found
*/
// A simple case where users can only delete themselves not the admin
router.delete('/:id', restrictTo('user'), deleteUser);
export default router;
| src/modules/auth/controller/users.ts | walosha-BACKEND_DEV_TESTS-db2fcb4 | [
{
"filename": "src/modules/auth/controller/index.ts",
"retrieved_chunk": " * description: The authenticated user.\n * content:\n * application/json:\n * schema:\n * $ref: '#/components/schemas/User'\n */\nrouter.post('/login', login);\n/**\n * @swagger\n * /api/v1/auth/refresh:",
"score": 23.36940559308816
},
{
"filename": "src/modules/auth/controller/index.ts",
"retrieved_chunk": " * content:\n * application/json:\n * schema:\n * $ref: '#/components/schemas/User'\n */\nrouter.post('/signup', signup);\n/**\n * @swagger\n * /api/v1/auth/login:\n * post:",
"score": 22.615784206928453
},
{
"filename": "src/modules/auth/controller/index.ts",
"retrieved_chunk": " * summary: Login User\n * tags: [Auth]\n * requestBody:\n * required: true\n * content:\n * application/json:\n * schema:\n * $ref: '#/components/schemas/LoginRequest'\n * responses:\n * \"200\":",
"score": 19.57300590680708
},
{
"filename": "src/modules/auth/controller/index.ts",
"retrieved_chunk": " * tags: [Auth]\n * requestBody:\n * required: true\n * content:\n * application/json:\n * schema:\n * $ref: '#/components/schemas/SignupRequest'\n * responses:\n * \"200\":\n * description: The created user.",
"score": 19.21657302046459
},
{
"filename": "src/modules/account/controller/index.ts",
"retrieved_chunk": " * description: Invalid request parameters\n * '401':\n * description: Unauthorized request\n */\nrouter.post('/transfer', protect, transferFund);\nexport default router;",
"score": 17.839611483388857
}
] | typescript | ('/', protect, restrictTo('admin'), fetchUsers); |
/**
* @swagger
* components:
* schemas:
* SignupRequest:
* type: object
* required:
* - email
* - password
* - name
* properties:
* name:
* type: string
* description: The user name
* email:
* type: string
* description: The user email address
* password:
* type: string
* description: The user password
* example:
* name: John Doe
* email: [email protected]
* password: password123
* LoginRequest:
* type: object
* required:
* - email
* - password
* properties:
* email:
* type: string
* description: The user email address
* password:
* type: string
* description: The user password
* example:
* email: [email protected]
* password: password123
*/
import express from 'express';
import { getMe, login, refresh, signup } from '../service';
import { refreshMiddleware } from '../../../middleware/refresh';
import { protect } from '../../../middleware';
const router = express.Router();
/**
* @swagger
* /api/v1/auth/signup:
* post:
* summary: Creates an account
* tags: [Auth]
* requestBody:
* required: true
* content:
* application/json:
* schema:
* $ref: '#/components/schemas/SignupRequest'
* responses:
* "200":
* description: The created user.
* content:
* application/json:
* schema:
* $ref: '#/components/schemas/User'
*/
router.post('/signup', signup);
/**
* @swagger
* /api/v1/auth/login:
* post:
* summary: Login User
* tags: [Auth]
* requestBody:
* required: true
* content:
* application/json:
* schema:
* $ref: '#/components/schemas/LoginRequest'
* responses:
* "200":
* description: The authenticated user.
* content:
* application/json:
* schema:
* $ref: '#/components/schemas/User'
*/
router.post('/login', login);
/**
* @swagger
* /api/v1/auth/refresh:
* post:
* summary: Refreshes the access token
* tags: [Auth]
* requestBody:
* required: true
* content:
* application/json:
* schema:
* type: object
* required:
* - refresh
* properties:
* refresh:
* type: string
* description: Refresh token
* example: eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpZCI6IjY0NGYwMjg0MWRmNGJlYzliOWI3ZjlhYSIsImlhdCI6MTY4Mjg5OTU4OCwiZXhwIjoxNjgzMDcyMzg4fQ.Bt2kzyxyUEtUy9pLvr0zSzpI8_xTaM6KulO2mwYztbQ
* responses:
* "200":
* description: The new access token
* content:
* application/json:
* schema:
* type: object
* properties:
* accessToken:
* type: string
* description: Access token
* example: eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiJKb2huIERvZSIsImlhdCI6MTUxNjIzOTAyMn0.SflKxwRJSMeKKF2QT4fwpMeJf36POk6yJV_adQssw5c
* "400":
* description: Invalid request or refresh token is not present
* "401":
* description: Invalid or expired token or refresh token was already used
*/
router.post('/refresh', refreshMiddleware, refresh);
/**
* @swagger
* /api/v1/auth/me:
* post:
* summary: Get user profile
* tags: [Auth]
* security:
* - bearerAuth: []
* responses:
* "200":
* description: The user profile
* "401":
* description: Unauthorized
*/
router.post(' | /me', protect, getMe); |
export default router;
| src/modules/auth/controller/index.ts | walosha-BACKEND_DEV_TESTS-db2fcb4 | [
{
"filename": "src/modules/account/controller/index.ts",
"retrieved_chunk": " * description: Invalid request parameters\n * '401':\n * description: Unauthorized request\n */\nrouter.post('/transfer', protect, transferFund);\nexport default router;",
"score": 25.78394479631677
},
{
"filename": "src/modules/auth/controller/users.ts",
"retrieved_chunk": " * tags: [User]\n * security:\n * - bearerAuth: []\n * responses:\n * \"200\":\n * description: A list of users\n * content:\n * application/json:\n * schema:\n * type: array",
"score": 18.95573333462072
},
{
"filename": "src/modules/auth/controller/users.ts",
"retrieved_chunk": " * description: The ID of the user to delete\n * responses:\n * \"204\":\n * description: User deleted successfully\n * \"401\":\n * description: Unauthorized\n * \"404\":\n * description: User not found\n */\n// A simple case where users can only delete themselves not the admin",
"score": 17.81497743905644
},
{
"filename": "src/modules/auth/controller/users.ts",
"retrieved_chunk": " * items:\n * $ref: '#/components/schemas/User'\n * \"401\":\n * description: Unauthorized\n */\nrouter.get('/', protect, restrictTo('admin'), fetchUsers);\n/**\n * @swagger\n * /api/v1/users/{id}:\n * delete:",
"score": 17.185952635685464
},
{
"filename": "src/modules/account/controller/index.ts",
"retrieved_chunk": " * description: The amount of funds to transfer.\n * example: 1000.00\n * tag:\n * type: string\n * description: The tag associated with the transfer.\n * example: \"Rent payment\"\n * responses:\n * '200':\n * description: Successful transfer of funds\n * '400':",
"score": 15.12522358086859
}
] | typescript | /me', protect, getMe); |
import { sign } from 'jsonwebtoken';
import { IUser } from '../types';
import { Request, Response } from 'express';
import User from '../model';
import { AppError } from '../../../utils/appError';
import { catchAsync } from '../../../utils/catchAsync';
import redisService from '../../../utils/redis';
const accessToken = (user: { _id: string; name: string; email: string; role: string }) => {
return sign(
{ id: user._id, name: user.name, email: user.email, type: process.env.JWT_ACCESS, role: user.role },
process.env.JWT_KEY_SECRET as string,
{
subject: user.email,
expiresIn: process.env.JWT_EXPIRES_IN,
audience: process.env.JWT_AUDIENCE,
issuer: process.env.JWT_ISSUER,
},
);
};
const refreshToken = (user: { _id: string; name: string; email: string; role: string }) => {
return sign(
{ id: user._id, name: user.name, email: user.email, type: process.env.JWT_REFRESH, role: user.role },
process.env.JWT_KEY_REFRESH as string,
{
subject: user.email,
expiresIn: process.env.JWT_EXPIRES_IN,
audience: process.env.JWT_AUDIENCE,
issuer: process.env.JWT_ISSUER,
},
);
};
const createSendToken = (user: IUser, statusCode: number, req: Request, res: Response) => {
const acess = accessToken(user);
const refresh = refreshToken(user);
// Remove password from output
// eslint-disable-next-line @typescript-eslint/no-unused-vars
const { name, email, role, ...otherUserData } = user;
res.status(statusCode).json({
status: 'success',
acess,
refresh,
data: {
name,
email,
role,
},
});
};
export const signup = catchAsync(async (req, res) => {
const newUser = await User.create({
name: req.body.name,
email: req.body.email,
password: req.body.password,
});
createSendToken(newUser, 201, req, res);
});
export const login = catchAsync(async | (req, res, next) => { |
const { email, password } = req.body;
// 1) Check if email and password exist
if (!email || !password) {
return next(new AppError('Please provide email and password!', 400));
}
// 2) Check if user exists && password is correct
const user: any = await User.findOne({ email }).select('+password');
if (!user || !(await user.correctPassword(password, user.password))) {
return next(new AppError('Incorrect email or password', 401));
}
// 3) If everything ok, send token to client
createSendToken(user, 200, req, res);
});
export const getMe = catchAsync(async (req, res) => {
const user = req.user;
// 3) If everything ok, send token to client
res.status(200).json({ message: 'user sucessfully fetched!', user });
});
export function logout(req: Request, res: Response) {
res.cookie('jwt', 'loggedout', {
expires: new Date(Date.now() + 10 * 1000),
httpOnly: true,
});
res.status(200).json({ status: 'success' });
}
export async function refresh(req: Request, res: Response) {
const user: any = req.user;
await redisService.set({
key: user?.token,
value: '1',
timeType: 'EX',
time: parseInt(process.env.JWT_REFRESH_TIME || '', 10),
});
const refresh = refreshToken(user);
return res.status(200).json({ status: 'sucess', refresh });
}
export async function fetchUsers(req: Request, res: Response) {
const body = req.body;
console.log({ body });
try {
const users = await User.find();
return res.status(200).json({ message: 'sucessfully fetch users', data: users });
} catch (error: any) {
new AppError(error.message, 201);
}
}
export async function deleteUser(req: Request, res: Response) {
const id = req.params.id;
try {
await User.deleteOne({ _id: id });
return res.status(200).json({ message: 'sucessfully deleted users' });
} catch (error: any) {
new AppError(error.message, 201);
}
}
| src/modules/auth/service/index.ts | walosha-BACKEND_DEV_TESTS-db2fcb4 | [
{
"filename": "src/utils/catchAsync.ts",
"retrieved_chunk": "import { NextFunction, Request, Response } from 'express';\ntype AsyncFunction = (req: Request, res: Response, next: NextFunction) => Promise<any>;\nexport const catchAsync = (fn: AsyncFunction) => {\n return (req: Request, res: Response, next: NextFunction) => {\n fn(req, res, next).catch(next);\n };\n};",
"score": 35.04689734701321
},
{
"filename": "src/middleware/refresh.ts",
"retrieved_chunk": "import jwt, { JwtPayload } from 'jsonwebtoken';\nimport redisService from '../utils/redis';\nimport { AppError } from '../utils/appError';\nimport { NextFunction, Request, Response } from 'express';\nexport const refreshMiddleware: any = async (req: Request, res: Response, next: NextFunction) => {\n if (req.body?.refresh) {\n const token = req.body.refresh;\n try {\n const decoded: any = jwt.verify(token, process.env.JWT_KEY_REFRESH as string) as JwtPayload;\n if (",
"score": 34.38804612060772
},
{
"filename": "src/middleware/protect.ts",
"retrieved_chunk": "import { NextFunction, Request, Response } from 'express';\nimport { JwtPayload, verify } from 'jsonwebtoken';\nimport { AppError } from '../utils/appError';\nimport { catchAsync } from '../utils/catchAsync';\nimport User from '../modules/auth/model';\nexport const protect = catchAsync(async (req: Request, res: Response, next: NextFunction) => {\n // 1) Getting token and check of it's there\n let token;\n if (req.headers.authorization && req.headers.authorization.startsWith('Bearer')) {\n token = req.headers.authorization.split(' ')[1];",
"score": 32.648631112473666
},
{
"filename": "src/modules/account/service/index.ts",
"retrieved_chunk": "import { Request, Response } from 'express';\nimport Account from '../model';\nexport const transferFund = async (req: Request, res: Response) => {\n const { fromAccountId, toAccountId, amount } = req.body;\n try {\n let srcAccount: any = await Account.findById(fromAccountId);\n let destAccount: any = await Account.findById(toAccountId);\n if (String(srcAccount.user) == String(destAccount.user)) {\n return res.status(400).json({\n error: 'Cannot transfer to own acccount',",
"score": 29.95978992658713
},
{
"filename": "src/middleware/isLoggedIn.ts",
"retrieved_chunk": "/* eslint-disable @typescript-eslint/no-explicit-any */\nimport { NextFunction, Request, Response } from 'express';\nimport jwt from 'jsonwebtoken';\nimport User from '../modules/auth/model';\n// Only for rendered pages, no errors!\nexport async function isLoggedIn(req: Request, res: Response, next: NextFunction) {\n if (req.cookies.jwt) {\n try {\n // 1) verify token\n const decoded: any = await jwt.verify(req.cookies.jwt, process.env.JWT_KEY_SECRET as string);",
"score": 23.53646495015619
}
] | typescript | (req, res, next) => { |
/**
* @swagger
* components:
* schemas:
* User:
* type: object
* required:
* - name
* - email
* properties:
* name:
* type: string
* description: The user name
* email:
* type: string
* format: email
* description: The user email address
* password:
* type: string
* description: The user password (hashed)
* role:
* type: string
* enum: [user, admin]
* description: The user role
* default: user
* example:
* name: John Doe
* email: [email protected]
* password: $2a$10$gR06R4K1NM4p4b4ELq.LlOTzq3Dcxj2iPwE5U/O2MDE70o9noemhO
* role: user
*/
import express from 'express';
import { deleteUser, fetchUsers } from '../service';
import { protect, restrictTo } from '../../../middleware';
const router = express.Router();
/**
* @swagger
* /api/v1/users:
* get:
* summary: Retrieve all users
* tags: [User]
* security:
* - bearerAuth: []
* responses:
* "200":
* description: A list of users
* content:
* application/json:
* schema:
* type: array
* items:
* $ref: '#/components/schemas/User'
* "401":
* description: Unauthorized
*/
router.get('/', protect, restrictTo('admin'), fetchUsers);
/**
* @swagger
* /api/v1/users/{id}:
* delete:
* summary: Delete a user by ID
* tags: [User]
* security:
* - bearerAuth: []
* parameters:
* - in: path
* name: id
* schema:
* type: string
* required: true
* description: The ID of the user to delete
* responses:
* "204":
* description: User deleted successfully
* "401":
* description: Unauthorized
* "404":
* description: User not found
*/
// A simple case where users can only delete themselves not the admin
router | .delete('/:id', restrictTo('user'), deleteUser); |
export default router;
| src/modules/auth/controller/users.ts | walosha-BACKEND_DEV_TESTS-db2fcb4 | [
{
"filename": "src/modules/auth/controller/index.ts",
"retrieved_chunk": " * security:\n * - bearerAuth: []\n * responses:\n * \"200\":\n * description: The user profile\n * \"401\":\n * description: Unauthorized\n */\nrouter.post('/me', protect, getMe);\nexport default router;",
"score": 20.26443095415952
},
{
"filename": "src/modules/account/controller/index.ts",
"retrieved_chunk": " * description: Invalid request parameters\n * '401':\n * description: Unauthorized request\n */\nrouter.post('/transfer', protect, transferFund);\nexport default router;",
"score": 20.162625913148
},
{
"filename": "src/modules/auth/service/index.ts",
"retrieved_chunk": " new AppError(error.message, 201);\n }\n}\nexport async function deleteUser(req: Request, res: Response) {\n const id = req.params.id;\n try {\n await User.deleteOne({ _id: id });\n return res.status(200).json({ message: 'sucessfully deleted users' });\n } catch (error: any) {\n new AppError(error.message, 201);",
"score": 16.182647548468832
},
{
"filename": "src/modules/auth/controller/index.ts",
"retrieved_chunk": " * description: The authenticated user.\n * content:\n * application/json:\n * schema:\n * $ref: '#/components/schemas/User'\n */\nrouter.post('/login', login);\n/**\n * @swagger\n * /api/v1/auth/refresh:",
"score": 13.55597814240315
},
{
"filename": "src/modules/auth/controller/index.ts",
"retrieved_chunk": " * application/json:\n * schema:\n * type: object\n * properties:\n * accessToken:\n * type: string\n * description: Access token\n * example: eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiJKb2huIERvZSIsImlhdCI6MTUxNjIzOTAyMn0.SflKxwRJSMeKKF2QT4fwpMeJf36POk6yJV_adQssw5c\n * \"400\":\n * description: Invalid request or refresh token is not present",
"score": 13.17132275887241
}
] | typescript | .delete('/:id', restrictTo('user'), deleteUser); |
import { sign } from 'jsonwebtoken';
import { IUser } from '../types';
import { Request, Response } from 'express';
import User from '../model';
import { AppError } from '../../../utils/appError';
import { catchAsync } from '../../../utils/catchAsync';
import redisService from '../../../utils/redis';
const accessToken = (user: { _id: string; name: string; email: string; role: string }) => {
return sign(
{ id: user._id, name: user.name, email: user.email, type: process.env.JWT_ACCESS, role: user.role },
process.env.JWT_KEY_SECRET as string,
{
subject: user.email,
expiresIn: process.env.JWT_EXPIRES_IN,
audience: process.env.JWT_AUDIENCE,
issuer: process.env.JWT_ISSUER,
},
);
};
const refreshToken = (user: { _id: string; name: string; email: string; role: string }) => {
return sign(
{ id: user._id, name: user.name, email: user.email, type: process.env.JWT_REFRESH, role: user.role },
process.env.JWT_KEY_REFRESH as string,
{
subject: user.email,
expiresIn: process.env.JWT_EXPIRES_IN,
audience: process.env.JWT_AUDIENCE,
issuer: process.env.JWT_ISSUER,
},
);
};
const createSendToken = (user: IUser, statusCode: number, req: Request, res: Response) => {
const acess = accessToken(user);
const refresh = refreshToken(user);
// Remove password from output
// eslint-disable-next-line @typescript-eslint/no-unused-vars
const { name, email, role, ...otherUserData } = user;
res.status(statusCode).json({
status: 'success',
acess,
refresh,
data: {
name,
email,
role,
},
});
};
| export const signup = catchAsync(async (req, res) => { |
const newUser = await User.create({
name: req.body.name,
email: req.body.email,
password: req.body.password,
});
createSendToken(newUser, 201, req, res);
});
export const login = catchAsync(async (req, res, next) => {
const { email, password } = req.body;
// 1) Check if email and password exist
if (!email || !password) {
return next(new AppError('Please provide email and password!', 400));
}
// 2) Check if user exists && password is correct
const user: any = await User.findOne({ email }).select('+password');
if (!user || !(await user.correctPassword(password, user.password))) {
return next(new AppError('Incorrect email or password', 401));
}
// 3) If everything ok, send token to client
createSendToken(user, 200, req, res);
});
export const getMe = catchAsync(async (req, res) => {
const user = req.user;
// 3) If everything ok, send token to client
res.status(200).json({ message: 'user sucessfully fetched!', user });
});
export function logout(req: Request, res: Response) {
res.cookie('jwt', 'loggedout', {
expires: new Date(Date.now() + 10 * 1000),
httpOnly: true,
});
res.status(200).json({ status: 'success' });
}
export async function refresh(req: Request, res: Response) {
const user: any = req.user;
await redisService.set({
key: user?.token,
value: '1',
timeType: 'EX',
time: parseInt(process.env.JWT_REFRESH_TIME || '', 10),
});
const refresh = refreshToken(user);
return res.status(200).json({ status: 'sucess', refresh });
}
export async function fetchUsers(req: Request, res: Response) {
const body = req.body;
console.log({ body });
try {
const users = await User.find();
return res.status(200).json({ message: 'sucessfully fetch users', data: users });
} catch (error: any) {
new AppError(error.message, 201);
}
}
export async function deleteUser(req: Request, res: Response) {
const id = req.params.id;
try {
await User.deleteOne({ _id: id });
return res.status(200).json({ message: 'sucessfully deleted users' });
} catch (error: any) {
new AppError(error.message, 201);
}
}
| src/modules/auth/service/index.ts | walosha-BACKEND_DEV_TESTS-db2fcb4 | [
{
"filename": "src/middleware/protect.ts",
"retrieved_chunk": "import { NextFunction, Request, Response } from 'express';\nimport { JwtPayload, verify } from 'jsonwebtoken';\nimport { AppError } from '../utils/appError';\nimport { catchAsync } from '../utils/catchAsync';\nimport User from '../modules/auth/model';\nexport const protect = catchAsync(async (req: Request, res: Response, next: NextFunction) => {\n // 1) Getting token and check of it's there\n let token;\n if (req.headers.authorization && req.headers.authorization.startsWith('Bearer')) {\n token = req.headers.authorization.split(' ')[1];",
"score": 10.27481144660087
},
{
"filename": "src/utils/catchAsync.ts",
"retrieved_chunk": "import { NextFunction, Request, Response } from 'express';\ntype AsyncFunction = (req: Request, res: Response, next: NextFunction) => Promise<any>;\nexport const catchAsync = (fn: AsyncFunction) => {\n return (req: Request, res: Response, next: NextFunction) => {\n fn(req, res, next).catch(next);\n };\n};",
"score": 9.88492815162634
},
{
"filename": "src/middleware/refresh.ts",
"retrieved_chunk": " req.user = {\n email: decoded.email,\n name: decoded.name,\n role: decoded.role,\n token,\n };\n next();\n return;\n } catch (err) {\n console.log({ err });",
"score": 9.797796687975092
},
{
"filename": "src/middleware/refresh.ts",
"retrieved_chunk": "import jwt, { JwtPayload } from 'jsonwebtoken';\nimport redisService from '../utils/redis';\nimport { AppError } from '../utils/appError';\nimport { NextFunction, Request, Response } from 'express';\nexport const refreshMiddleware: any = async (req: Request, res: Response, next: NextFunction) => {\n if (req.body?.refresh) {\n const token = req.body.refresh;\n try {\n const decoded: any = jwt.verify(token, process.env.JWT_KEY_REFRESH as string) as JwtPayload;\n if (",
"score": 9.192050672156531
},
{
"filename": "src/modules/auth/controller/index.ts",
"retrieved_chunk": "import express from 'express';\nimport { getMe, login, refresh, signup } from '../service';\nimport { refreshMiddleware } from '../../../middleware/refresh';\nimport { protect } from '../../../middleware';\nconst router = express.Router();\n/**\n * @swagger\n * /api/v1/auth/signup:\n * post:\n * summary: Creates an account",
"score": 8.987614342960367
}
] | typescript | export const signup = catchAsync(async (req, res) => { |
import { sign } from 'jsonwebtoken';
import { IUser } from '../types';
import { Request, Response } from 'express';
import User from '../model';
import { AppError } from '../../../utils/appError';
import { catchAsync } from '../../../utils/catchAsync';
import redisService from '../../../utils/redis';
const accessToken = (user: { _id: string; name: string; email: string; role: string }) => {
return sign(
{ id: user._id, name: user.name, email: user.email, type: process.env.JWT_ACCESS, role: user.role },
process.env.JWT_KEY_SECRET as string,
{
subject: user.email,
expiresIn: process.env.JWT_EXPIRES_IN,
audience: process.env.JWT_AUDIENCE,
issuer: process.env.JWT_ISSUER,
},
);
};
const refreshToken = (user: { _id: string; name: string; email: string; role: string }) => {
return sign(
{ id: user._id, name: user.name, email: user.email, type: process.env.JWT_REFRESH, role: user.role },
process.env.JWT_KEY_REFRESH as string,
{
subject: user.email,
expiresIn: process.env.JWT_EXPIRES_IN,
audience: process.env.JWT_AUDIENCE,
issuer: process.env.JWT_ISSUER,
},
);
};
const createSendToken = (user: IUser, statusCode: number, req: Request, res: Response) => {
const acess = accessToken(user);
const refresh = refreshToken(user);
// Remove password from output
// eslint-disable-next-line @typescript-eslint/no-unused-vars
const { name, email, role, ...otherUserData } = user;
res.status(statusCode).json({
status: 'success',
acess,
refresh,
data: {
name,
email,
role,
},
});
};
export const signup = catchAsync(async (req, res) => {
const newUser = await User.create({
name: req.body.name,
email: req.body.email,
password: req.body.password,
});
createSendToken(newUser, 201, req, res);
});
export const login = catchAsync(async (req, res, next) => {
const { email, password } = req.body;
// 1) Check if email and password exist
if (!email || !password) {
| return next(new AppError('Please provide email and password!', 400)); |
}
// 2) Check if user exists && password is correct
const user: any = await User.findOne({ email }).select('+password');
if (!user || !(await user.correctPassword(password, user.password))) {
return next(new AppError('Incorrect email or password', 401));
}
// 3) If everything ok, send token to client
createSendToken(user, 200, req, res);
});
export const getMe = catchAsync(async (req, res) => {
const user = req.user;
// 3) If everything ok, send token to client
res.status(200).json({ message: 'user sucessfully fetched!', user });
});
export function logout(req: Request, res: Response) {
res.cookie('jwt', 'loggedout', {
expires: new Date(Date.now() + 10 * 1000),
httpOnly: true,
});
res.status(200).json({ status: 'success' });
}
export async function refresh(req: Request, res: Response) {
const user: any = req.user;
await redisService.set({
key: user?.token,
value: '1',
timeType: 'EX',
time: parseInt(process.env.JWT_REFRESH_TIME || '', 10),
});
const refresh = refreshToken(user);
return res.status(200).json({ status: 'sucess', refresh });
}
export async function fetchUsers(req: Request, res: Response) {
const body = req.body;
console.log({ body });
try {
const users = await User.find();
return res.status(200).json({ message: 'sucessfully fetch users', data: users });
} catch (error: any) {
new AppError(error.message, 201);
}
}
export async function deleteUser(req: Request, res: Response) {
const id = req.params.id;
try {
await User.deleteOne({ _id: id });
return res.status(200).json({ message: 'sucessfully deleted users' });
} catch (error: any) {
new AppError(error.message, 201);
}
}
| src/modules/auth/service/index.ts | walosha-BACKEND_DEV_TESTS-db2fcb4 | [
{
"filename": "src/middleware/refresh.ts",
"retrieved_chunk": "import jwt, { JwtPayload } from 'jsonwebtoken';\nimport redisService from '../utils/redis';\nimport { AppError } from '../utils/appError';\nimport { NextFunction, Request, Response } from 'express';\nexport const refreshMiddleware: any = async (req: Request, res: Response, next: NextFunction) => {\n if (req.body?.refresh) {\n const token = req.body.refresh;\n try {\n const decoded: any = jwt.verify(token, process.env.JWT_KEY_REFRESH as string) as JwtPayload;\n if (",
"score": 33.01250119551897
},
{
"filename": "src/modules/auth/model/index.ts",
"retrieved_chunk": " password: string;\n}\nconst userSchema = new Schema<IUser>({\n name: {\n type: String,\n required: [true, 'Please tell us your name!'],\n },\n email: {\n type: String,\n required: [true, 'Please provide your email'],",
"score": 32.67343471345242
},
{
"filename": "src/middleware/protect.ts",
"retrieved_chunk": "import { NextFunction, Request, Response } from 'express';\nimport { JwtPayload, verify } from 'jsonwebtoken';\nimport { AppError } from '../utils/appError';\nimport { catchAsync } from '../utils/catchAsync';\nimport User from '../modules/auth/model';\nexport const protect = catchAsync(async (req: Request, res: Response, next: NextFunction) => {\n // 1) Getting token and check of it's there\n let token;\n if (req.headers.authorization && req.headers.authorization.startsWith('Bearer')) {\n token = req.headers.authorization.split(' ')[1];",
"score": 32.1084410364793
},
{
"filename": "src/modules/auth/controller/index.ts",
"retrieved_chunk": " * email:\n * type: string\n * description: The user email address\n * password:\n * type: string\n * description: The user password\n * example:\n * email: [email protected]\n * password: password123\n */",
"score": 31.404045148977975
},
{
"filename": "src/modules/account/controller/index.ts",
"retrieved_chunk": " * email:\n * type: string\n * description: The user email address\n * password:\n * type: string\n * description: The user password\n * example:\n * email: [email protected]\n * password: password123\n */",
"score": 31.404045148977975
}
] | typescript | return next(new AppError('Please provide email and password!', 400)); |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.