| prefix | middle | suffix | file_path | repo_name | context | lang | ground_truth |
|---|---|---|---|---|---|---|---|
| stringlengths 82-32.6k | stringlengths 5-470 | stringlengths 0-81.2k | stringlengths 6-168 | stringlengths 16-77 | listlengths 5-5 | stringclasses 4 values | stringlengths 5-470 |
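For orientation, each row below is a fill-in-the-middle (FIM) sample: `prefix` and `suffix` hold the code surrounding a masked span, `middle`/`ground_truth` hold the masked span itself, and `context` holds five retrieved chunks from the same repository. A minimal sketch of the row shape as a TypeScript type follows; the field names come from the header above, while the nested `ContextChunk` shape and the comments are inferred from the rows themselves and are assumptions, not an official schema.

```typescript
// Hypothetical row shape for this dataset, inferred from the header and rows below.
interface ContextChunk {
  filename: string;        // source file the chunk was retrieved from
  retrieved_chunk: string; // raw code snippet used as retrieval context
  score: number;           // retrieval relevance score
}

interface FimRow {
  prefix: string;          // code before the masked span (82 to 32.6k chars)
  middle: string;          // the masked span to be completed (5 to 470 chars)
  suffix: string;          // code after the masked span (0 to 81.2k chars)
  file_path: string;       // path of the source file within the repository
  repo_name: string;       // repository identifier
  context: ContextChunk[]; // always five retrieved chunks per row
  lang: string;            // one of four language classes, e.g. 'typescript'
  ground_truth: string;    // reference completion; mirrors `middle` in the rows below
}
```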
/* eslint-disable @typescript-eslint/no-explicit-any */
import fs from 'fs';
import { CoverInfo, CoverInfoFunctionsDetails, CoverInfoLinesDetails } from '../types';
import parseString from 'xml2js';
import * as core from '@actions/core';
const classDetailsFromProjects = (projects: any) => {
let classDetails: any[] = [];
let packageName = null;
const parseFileObject = (fileObj: any, packageName: string) => {
if (fileObj.class) {
fileObj['class'].forEach((classObj: any) => {
classDetails = classDetails.concat({
name: classObj.$.name,
metrics: classObj.metrics[0],
fileName: fileObj.$.name,
fileMetrics: fileObj.metrics[0],
lines: fileObj.line,
packageName: packageName,
});
});
} else {
classDetails = classDetails.concat({
name: null,
metrics: null,
fileName: fileObj.$.name,
fileMetrics: fileObj.metrics[0],
lines: fileObj.line,
packageName: packageName,
});
}
};
projects.forEach((projectObj: any) => {
if (projectObj.package) {
projectObj.package.forEach((data: any) => {
if (data.$?.name) {
packageName = data.$.name;
} else {
packageName = null;
}
data.file.forEach(parseFileObject);
});
}
if (projectObj.file) {
packageName = null;
projectObj.file.forEach(parseFileObject);
}
});
return classDetails;
};
const unpackage = (projects: any): CoverInfo[] => {
const classDetails = classDetailsFromProjects(projects);
return classDetails.map((c: any) => {
| const methodStats: CoverInfoFunctionsDetails[] = []; |
const lineStats: CoverInfoLinesDetails[] = [];
if (c.lines) {
c.lines.forEach((l: any) => {
if (l.$.type === 'method') {
methodStats.push({
name: l.$.name,
line: Number(l.$.num),
hit: Number(l.$.count),
});
} else {
lineStats.push({
line: Number(l.$.num),
hit: Number(l.$.count),
});
}
});
}
const classCov: CoverInfo = {
title: c.name,
file: c.fileName,
functions: {
found: methodStats.length,
hit: 0,
details: methodStats,
},
lines: {
found: lineStats.length,
hit: 0,
details: lineStats,
},
branches: {
found: 0,
hit: 0,
details: [],
},
};
classCov.functions.hit = classCov.functions.details.reduce((acc, val) => {
return acc + (val.hit > 0 ? 1 : 0);
}, 0);
classCov.lines.hit = classCov.lines.details.reduce((acc, val) => {
return acc + (val.hit > 0 ? 1 : 0);
}, 0);
return classCov;
});
};
const parseContent = (xml: any): Promise<CoverInfo[]> => {
return new Promise((resolve, reject) => {
parseString.parseString(xml, (err, parseResult) => {
if (err) {
return reject(err);
}
if (!parseResult?.coverage?.project) {
return reject(new Error('invalid or missing xml content'));
}
const result = unpackage(parseResult.coverage.project);
resolve(result);
});
});
};
export const parseFile = async (file: string): Promise<CoverInfo[]> => {
return new Promise((resolve, reject) => {
if (!file || file === '') {
core.info('no clover file specified');
resolve([]);
} else {
fs.readFile(
file,
'utf8',
async (err: NodeJS.ErrnoException | null, data: string) => {
if (err) {
core.error(`failed to read file: ${file}. error: ${err.message}`);
reject(err);
} else {
try {
const info = await parseContent(data);
// console.log('====== clover ======');
// console.log(JSON.stringify(info, null, 2));
resolve(info);
} catch (error) {
core.error(`failed to parseContent. err: ${error.message}`);
reject(error);
}
}
},
);
}
});
};
| src/parsers/clover.ts | aGallea-tests-coverage-report-7728fb4 | [
{
"filename": "src/parsers/cobertura.ts",
"retrieved_chunk": " pack.classes.forEach((c: any) => {\n classes.push(...c.class);\n });\n });\n });\n return classes;\n};\nconst extractLcovStyleBranches = (c: any) => {\n const branches: CoverInfoBranchesDetails[] = [];\n if (c.lines && c.lines[0].line) {",
"score": 17.889288888265572
},
{
"filename": "src/parsers/cobertura.ts",
"retrieved_chunk": " const packages = coverage.packages;\n const source = coverage.sources[0].source[0];\n const classes = classesFromPackages(packages);\n return classes.map((c) => {\n const branches = extractLcovStyleBranches(c);\n const classCov: CoverInfo = {\n title: c.$.name,\n // file: c.$.filename,\n file: path.join(source, c.$.filename).replace(pwd, ''),\n functions: {",
"score": 16.10048868962686
},
{
"filename": "src/parsers/jacoco.ts",
"retrieved_chunk": " let output: CoverInfo[] = [];\n packages.forEach((pack: any) => {\n const cov = pack.sourcefile.map((source: any) => {\n const fullPath = pack.$.name + '/' + source.$.name;\n const methods = getCounter(source, 'METHOD');\n const lines = getCounter(source, 'LINE');\n const branches = getCounter(source, 'BRANCH');\n const classCov: CoverInfo = {\n title: source.$.name,\n file: fullPath,",
"score": 15.840631348871964
},
{
"filename": "src/parsers/cobertura.ts",
"retrieved_chunk": " taken: 1,\n });\n branchNumber++;\n }\n }\n });\n }\n return branches;\n};\nconst unpackage = (coverage: any, pwd: string): CoverInfo[] => {",
"score": 14.614798869910139
},
{
"filename": "src/parsers/jacoco.ts",
"retrieved_chunk": " })[0] || {\n $: {\n covered: 0,\n missed: 0,\n },\n }\n );\n};\nconst unpackage = (report: any): CoverInfo[] => {\n const packages = report.package;",
"score": 13.730641265820662
}
] | typescript | const methodStats: CoverInfoFunctionsDetails[] = []; |
/* eslint-disable @typescript-eslint/no-explicit-any */
import fs from 'fs';
import path from 'path';
import { CoverInfo, CoverInfoBranchesDetails } from '../types';
import parseString from 'xml2js';
import * as core from '@actions/core';
const classesFromPackages = (packages: any) => {
const classes: any[] = [];
packages.forEach((packages: any) => {
packages.package.forEach((pack: any) => {
pack.classes.forEach((c: any) => {
classes.push(...c.class);
});
});
});
return classes;
};
const extractLcovStyleBranches = (c: any) => {
const branches: CoverInfoBranchesDetails[] = [];
if (c.lines && c.lines[0].line) {
c.lines[0].line.forEach((l: any) => {
if (l.$.branch == 'true') {
const branchFraction = l.$['condition-coverage'].split(' ');
const branchStats = branchFraction[1].match(/\d+/g);
const coveredBranches = Number(branchStats[0]);
const totalBranches = Number(branchStats[1]);
const leftBranches = totalBranches - coveredBranches;
let branchNumber = 0;
for (let i = 0; i < leftBranches; i++) {
branches.push({
line: Number(l.$.number),
branch: branchNumber,
taken: 0,
});
branchNumber++;
}
for (let i = 0; i < coveredBranches; i++) {
branches.push({
line: Number(l.$.number),
branch: branchNumber,
taken: 1,
});
branchNumber++;
}
}
});
}
return branches;
};
const unpackage = (coverage: any, pwd: string): CoverInfo[] => {
const packages = coverage.packages;
const source = coverage.sources[0].source[0];
const classes = classesFromPackages(packages);
return classes.map((c) => {
const branches = extractLcovStyleBranches(c);
const classCov: CoverInfo = {
title: c.$.name,
// file: c.$.filename,
file: path.join(source, c.$.filename).replace(pwd, ''),
functions: {
found: c.methods && c.methods[0].method ? c.methods[0].method.length : 0,
hit: 0,
details:
!c.methods || !c.methods[0].method
? []
: c.methods[0].method.map((m: any) => {
return {
name: m.$.name,
line: Number(m.lines[0].line[0].$.number),
hit: Number(m.lines[0].line[0].$.hits),
};
}),
},
lines: {
found: c.lines && c.lines[0].line ? c.lines[0].line.length : 0,
hit: 0,
details:
!c.lines || !c.lines[0].line
? []
: c.lines[0].line.map((l: any) => {
return {
line: Number(l.$.number),
hit: Number(l.$.hits),
};
}),
},
branches: {
found: branches.length,
hit: branches.filter((br) => {
return br.taken > 0;
}).length,
details: branches,
},
};
classCov. | functions.hit = classCov.functions.details.reduce((acc: any, val: any) => { |
return acc + (val.hit > 0 ? 1 : 0);
}, 0);
classCov.lines.hit = classCov.lines.details.reduce((acc: any, val: any) => {
return acc + (val.hit > 0 ? 1 : 0);
}, 0);
return classCov;
});
};
const parseContent = (xml: string, pwd: string): Promise<CoverInfo[]> => {
return new Promise((resolve, reject) => {
parseString.parseString(xml, (err, parseResult) => {
if (err) {
return reject(err);
}
if (!parseResult?.coverage) {
return reject(new Error('invalid or missing xml content'));
}
const result = unpackage(parseResult.coverage, pwd);
resolve(result);
});
});
};
export const parseFile = async (file: string, pwd: string): Promise<CoverInfo[]> => {
return new Promise((resolve, reject) => {
if (!file || file === '') {
core.info('no cobertura file specified');
resolve([]);
} else {
fs.readFile(
file,
'utf8',
async (err: NodeJS.ErrnoException | null, data: string) => {
if (err) {
core.error(`failed to read file: ${file}. error: ${err.message}`);
reject(err);
} else {
try {
const info = await parseContent(data, pwd);
// console.log('====== cobertura ======');
// console.log(JSON.stringify(info, null, 2));
resolve(info);
} catch (error) {
core.error(`failed to parseContent. err: ${error.message}`);
reject(error);
}
}
},
);
}
});
};
| src/parsers/cobertura.ts | aGallea-tests-coverage-report-7728fb4 | [
{
"filename": "src/parsers/clover.ts",
"retrieved_chunk": " hit: 0,\n details: lineStats,\n },\n branches: {\n found: 0,\n hit: 0,\n details: [],\n },\n };\n classCov.functions.hit = classCov.functions.details.reduce((acc, val) => {",
"score": 55.854215704313276
},
{
"filename": "src/parsers/lcov.ts",
"retrieved_chunk": " found: 0,\n hit: 0,\n details: [],\n },\n functions: {\n hit: 0,\n found: 0,\n details: [],\n },\n branches: {",
"score": 35.798781946712225
},
{
"filename": "src/parsers/clover.ts",
"retrieved_chunk": " return acc + (val.hit > 0 ? 1 : 0);\n }, 0);\n classCov.lines.hit = classCov.lines.details.reduce((acc, val) => {\n return acc + (val.hit > 0 ? 1 : 0);\n }, 0);\n return classCov;\n });\n};\nconst parseContent = (xml: any): Promise<CoverInfo[]> => {\n return new Promise((resolve, reject) => {",
"score": 35.218661480764396
},
{
"filename": "src/parsers/jacoco.ts",
"retrieved_chunk": " }),\n },\n branches: {\n found: Number(branches.$.covered) + Number(branches.$.missed),\n hit: Number(branches.$.covered),\n details:\n source.line\n ?.filter((l: any) => {\n return Number(l.$.mb) > 0 || Number(l.$.cb) > 0;\n })",
"score": 33.49344355333735
},
{
"filename": "src/parsers/clover.ts",
"retrieved_chunk": " const classCov: CoverInfo = {\n title: c.name,\n file: c.fileName,\n functions: {\n found: methodStats.length,\n hit: 0,\n details: methodStats,\n },\n lines: {\n found: lineStats.length,",
"score": 31.074724869400363
}
] | typescript | functions.hit = classCov.functions.details.reduce((acc: any, val: any) => { |
import { MarkdownRenderChild, MarkdownPostProcessorContext } from 'obsidian';
import { gDrawer } from './global/drawer';
import { ChemPluginSettings } from './settings/base';
import { addBlock, removeBlock } from './global/blocks';
import { i18n } from 'src/lib/i18n';
export class SmilesBlock extends MarkdownRenderChild {
constructor(
private readonly el: HTMLElement,
private readonly markdownSource: string,
private readonly context: MarkdownPostProcessorContext,
private readonly settings: ChemPluginSettings
) {
super(el); // important
addBlock(this);
}
render() {
// TODO: rendering animation
this.el.empty();
const rows = this.markdownSource
.split('\n')
.filter((row) => row.length > 0)
.map((row) => row.trim());
if (rows.length == 1) {
const div = this.el.createDiv({ cls: 'chem-cell' });
this.renderCell(rows[0], div);
} else {
const table = this.el.createDiv({ cls: 'chem-table' });
const maxWidth = this.settings.options?.width ?? 300;
rows.forEach((row) => {
const cell = table.createDiv({ cls: 'chem-cell' });
const svgcell = this.renderCell(row, cell);
if (parseFloat(svgcell.style.width) > maxWidth)
svgcell.style.width = `${maxWidth.toString()}px`;
});
table.style.gridTemplateColumns = `repeat(auto-fill, minmax(${
this.settings.options.width?.toString() ?? '300'
}px, 1fr))`;
}
}
private renderCell = (source: string, target: HTMLElement) => {
const svg = target.createSvg('svg');
gDrawer.draw(
source,
svg,
document.body.hasClass('theme-dark') &&
!document.body.hasClass('theme-light')
? this.settings.darkTheme
| : this.settings.lightTheme,
null,
(error: object & { name: string; message: string }) => { |
target.empty();
const ErrorContainer = target.createEl('div');
ErrorContainer.createDiv('error-source').setText(
i18n.t('errors.source.title', { source: source })
);
ErrorContainer.createEl('br');
const ErrorInfo = ErrorContainer.createEl('details');
ErrorInfo.createEl('summary').setText(error.name);
ErrorInfo.createEl('div').setText(error.message);
ErrorContainer.style.wordBreak = `break-word`;
ErrorContainer.style.userSelect = `text`;
//TODO: in multiline block, keep the width sync with the grid setting
if (this.settings.options.scale == 0)
ErrorContainer.style.width = `${
this.settings?.imgWidth.toString() ?? '300'
}px`;
else if (
ErrorContainer.offsetWidth >
(this.settings.options?.width ?? 300)
) {
ErrorContainer.style.width = `${(
this.settings.options?.width ?? 300
).toString()}px`;
ErrorContainer.style.height = `${(
this.settings.options?.height ?? 300
).toString()}px`;
}
}
);
if (this.settings.options.scale == 0)
svg.style.width = `${this.settings.imgWidth.toString()}px`;
return svg;
};
async onload() {
this.render();
}
onunload() {
removeBlock(this); // remove from global block list
}
}
| src/SmilesBlock.ts | Acylation-obsidian-chem-54b1d05 | [
{
"filename": "src/settings/LivePreview.ts",
"retrieved_chunk": "\t};\n\tprivate renderCell = (\n\t\tsource: string,\n\t\ttarget: HTMLElement,\n\t\tstyle: string\n\t) => {\n\t\tconst svg = target.createSvg('svg');\n\t\tgDrawer.draw(\n\t\t\tsource,\n\t\t\tsvg,",
"score": 47.484872180889795
},
{
"filename": "src/settings/LivePreview.ts",
"retrieved_chunk": "\t\t\tstyle,\n\t\t\tnull,\n\t\t\t(error: object & { name: string; message: string }) => {\n\t\t\t\ttarget.empty();\n\t\t\t\tconst ErrorContainer = target.createEl('div');\n\t\t\t\tErrorContainer.createDiv('error-source').setText(\n\t\t\t\t\ti18n.t('errors.source.title', { source: source })\n\t\t\t\t);\n\t\t\t\tErrorContainer.createEl('br');\n\t\t\t\tconst ErrorInfo = ErrorContainer.createEl('details');",
"score": 30.625886935439897
},
{
"filename": "src/themeObserver.ts",
"retrieved_chunk": "});\nexport const setObserver = () => {\n\tthemeObserver.observe(document.body, {\n\t\tattributes: true,\n\t\tattributeOldValue: true,\n\t\tattributeFilter: ['class'],\n\t});\n};\nexport const detachObserver = () => {\n\tthemeObserver.disconnect();",
"score": 19.63693301600281
},
{
"filename": "src/settings/update.ts",
"retrieved_chunk": "interface ChemPluginSettingsV1 {\n\tdarkTheme: string;\n\tlightTheme: string;\n\tsample: string;\n\twidth: string;\n}\n// const DEFAULT_SETTINGS_V1: ChemPluginSettingsV1 = {\n// \tdarkTheme: 'dark',\n// \tlightTheme: 'light',\n// \tsample: 'CC(=O)NC1=C-C=C-C=C1-C(=O)O',",
"score": 17.763415626731824
},
{
"filename": "src/settings/LivePreview.ts",
"retrieved_chunk": "\t\t\tsvg.style.width = `${(\n\t\t\t\tthis.settings.options?.width ?? 300\n\t\t\t).toString()}px`;\n\t\t\tsvg.style.height = `${(\n\t\t\t\tthis.settings.options?.height ?? 300\n\t\t\t).toString()}px`;\n\t\t}\n\t\treturn parseFloat(svg.style.width);\n\t};\n}",
"score": 17.536072498190492
}
] | typescript | : this.settings.lightTheme,
null,
(error: object & { name: string; message: string }) => { |
/* eslint-disable @typescript-eslint/no-explicit-any */
import fs from 'fs';
import { CoverInfo } from '../types';
import * as core from '@actions/core';
const parseContent = (str: string): CoverInfo[] => {
const data: any[] = [];
let item: CoverInfo;
['end_of_record'].concat(str.split('\n')).forEach((line: string) => {
line = line.trim();
const allparts: string[] = line.split(':') || [];
const parts: string[] = [allparts.shift() || '', allparts.join(':')];
let lines: any[];
let fn: any;
switch (parts[0].toUpperCase()) {
case 'TN':
item.title = parts[1].trim();
break;
case 'SF':
item.file = parts.slice(1).join(':').trim();
break;
case 'FNF':
item.functions.found = Number(parts[1].trim());
break;
case 'FNH':
item.functions.hit = Number(parts[1].trim());
break;
case 'LF':
item.lines.found = Number(parts[1].trim());
break;
case 'LH':
item.lines.hit = Number(parts[1].trim());
break;
case 'DA':
lines = parts[1].split(',');
item.lines.details.push({
line: Number(lines[0]),
hit: Number(lines[1]),
});
break;
case 'FN':
fn = parts[1].split(',');
item.functions.details.push({
name: fn[1],
line: Number(fn[0]),
hit: 0,
});
break;
case 'FNDA':
fn = parts[1].split(',');
item.functions.details.some((i: any, k: any) => {
if (i.name === fn[1] && i.hit === undefined) {
item.functions.details[k].hit = Number(fn[0]);
return true;
}
});
break;
case 'BRDA':
fn = parts[1].split(',');
| item.branches.details.push({ |
line: Number(fn[0]),
block: Number(fn[1]),
branch: Number(fn[2]),
taken: fn[3] === '-' ? 0 : Number(fn[3]),
});
break;
case 'BRF':
item.branches.found = Number(parts[1]);
break;
case 'BRH':
item.branches.hit = Number(parts[1]);
break;
}
if (line === 'end_of_record') {
if (item) {
data.push(item);
}
item = {
title: '',
file: '',
lines: {
found: 0,
hit: 0,
details: [],
},
functions: {
hit: 0,
found: 0,
details: [],
},
branches: {
hit: 0,
found: 0,
details: [],
},
};
}
});
if (!data.length) {
core.info('No lcov file content');
}
return data;
};
export function parseFile(file: string): Promise<CoverInfo[]> {
return new Promise((resolve, reject) => {
if (!file || file === '') {
core.info('no lcov file specified');
resolve([]);
} else {
fs.readFile(
file,
'utf8',
async (err: NodeJS.ErrnoException | null, data: string) => {
if (err) {
core.error(`failed to read file: ${file}. error: ${err.message}`);
reject(err);
} else {
try {
const info = parseContent(data);
// console.log('====== lcov ======');
// console.log(JSON.stringify(info, null, 2));
resolve(info);
} catch (error) {
core.error(`failed to parseContent. err: ${error.message}`);
reject(error);
}
}
},
);
}
});
}
| src/parsers/lcov.ts | aGallea-tests-coverage-report-7728fb4 | [
{
"filename": "src/parsers/cobertura.ts",
"retrieved_chunk": " }).length,\n details: branches,\n },\n };\n classCov.functions.hit = classCov.functions.details.reduce((acc: any, val: any) => {\n return acc + (val.hit > 0 ? 1 : 0);\n }, 0);\n classCov.lines.hit = classCov.lines.details.reduce((acc: any, val: any) => {\n return acc + (val.hit > 0 ? 1 : 0);\n }, 0);",
"score": 34.04697865924401
},
{
"filename": "src/parsers/cobertura.ts",
"retrieved_chunk": " c.lines[0].line.forEach((l: any) => {\n if (l.$.branch == 'true') {\n const branchFraction = l.$['condition-coverage'].split(' ');\n const branchStats = branchFraction[1].match(/\\d+/g);\n const coveredBranches = Number(branchStats[0]);\n const totalBranches = Number(branchStats[1]);\n const leftBranches = totalBranches - coveredBranches;\n let branchNumber = 0;\n for (let i = 0; i < leftBranches; i++) {\n branches.push({",
"score": 32.26160150947868
},
{
"filename": "src/parsers/jacoco.ts",
"retrieved_chunk": " .map((l: any) => {\n let branches: any[] = [];\n const count = Number(l.$.mb) + Number(l.$.cb);\n for (let i = 0; i < count; ++i) {\n branches = branches.concat({\n line: Number(l.$.nr),\n block: 0,\n branch: Number(i),\n taken: i < Number(l.$.cb) ? 1 : 0,\n });",
"score": 31.408369089144625
},
{
"filename": "src/parsers/clover.ts",
"retrieved_chunk": " hit: 0,\n details: lineStats,\n },\n branches: {\n found: 0,\n hit: 0,\n details: [],\n },\n };\n classCov.functions.hit = classCov.functions.details.reduce((acc, val) => {",
"score": 28.42703312892379
},
{
"filename": "src/parsers/jacoco.ts",
"retrieved_chunk": " functions: {\n found: Number(methods.$.covered) + Number(methods.$.missed),\n hit: Number(methods.$.covered),\n details: pack.class.reduce((result: any, currentClass: any) => {\n return !currentClass.method\n ? result\n : result.concat(\n currentClass.method.map((method: any) => {\n const hit = method.counter.some((counter: any) => {\n return counter.$.type === 'METHOD' && counter.$.covered === '1';",
"score": 27.519544451860195
}
] | typescript | item.branches.details.push({ |
import { App, PluginSettingTab, Setting, SliderComponent } from 'obsidian';
import ChemPlugin from '../main';
import {
DEFAULT_SD_OPTIONS,
SAMPLE_SMILES_1,
SAMPLE_SMILES_2,
themeList,
} from './base';
import { setDrawer } from 'src/global/drawer';
import { refreshBlocks } from 'src/global/blocks';
import { LivePreview } from './LivePreview';
import { i18n } from 'src/lib/i18n';
// Reference: https://smilesdrawer.surge.sh/playground.html
export class ChemSettingTab extends PluginSettingTab {
plugin: ChemPlugin;
constructor({ app, plugin }: { app: App; plugin: ChemPlugin }) {
super(app, plugin);
this.plugin = plugin;
}
display(): void {
const { containerEl } = this;
containerEl.empty();
const scaleSetting = new Setting(containerEl)
.setName(i18n.t('settings.scale.name'))
.setDesc(i18n.t('settings.scale.description'))
.addExtraButton((button) => {
button
.setIcon('rotate-ccw')
.setTooltip(i18n.t('settings.scale.description'))
.onClick(async () => {
this.plugin.settings.options.scale = 1;
scaleSlider.setValue(50);
await this.plugin.saveSettings();
setDrawer({
...DEFAULT_SD_OPTIONS,
...this.plugin.settings.options,
});
onSettingsChange();
unifyBondLength();
});
});
const scaleLabel = scaleSetting.controlEl.createDiv('slider-readout');
scaleLabel.setText(
(this.plugin.settings.options.scale ?? 1.0).toFixed(2).toString()
);
const scaleSlider = new SliderComponent(scaleSetting.controlEl)
.setValue(50 * (this.plugin.settings.options.scale ?? 1.0))
.setLimits(0.0, 100, 0.5)
.onChange(async (value) => {
this.plugin.settings.options.scale = value / 50;
scaleLabel.setText((value / 50).toFixed(2).toString());
await this.plugin.saveSettings();
setDrawer({
...DEFAULT_SD_OPTIONS,
...this.plugin.settings.options,
});
onSettingsChange();
if (value == 0) unifyImageWidth();
else unifyBondLength();
});
const widthSettings = new Setting(containerEl);
new Setting(containerEl)
.setName(i18n.t('settings.theme.light.name'))
.setDesc(i18n.t('settings.theme.light.description'))
.addDropdown((dropdown) =>
dropdown
.addOptions(themeList)
.setValue(this.plugin.settings.lightTheme)
.onChange(async (value) => {
this.plugin.settings.lightTheme = value;
await this.plugin.saveSettings();
onSettingsChange();
})
);
new Setting(containerEl)
.setName(i18n.t('settings.theme.dark.name'))
.setDesc(i18n.t('settings.theme.dark.description'))
.addDropdown((dropdown) =>
dropdown
.addOptions(themeList)
.setValue(this.plugin.settings.darkTheme)
.onChange(async (value) => {
this.plugin.settings.darkTheme = value;
await this.plugin.saveSettings();
onSettingsChange();
})
);
new Setting(containerEl)
.setName(i18n.t('settings.preview.title'))
.setHeading();
new Setting(containerEl)
.setName(i18n.t('settings.preview.sample.name'))
.setDesc(i18n.t('settings.preview.sample.description'))
.addText((text) =>
text
.setPlaceholder(SAMPLE_SMILES_1)
.setValue(this.plugin.settings.sample1)
.onChange(async (value) => {
if (value == '') {
value = SAMPLE_SMILES_1;
}
this.plugin.settings.sample1 = value;
await this.plugin.saveSettings();
onSettingsChange();
})
)
.addText((text) =>
text
.setPlaceholder(SAMPLE_SMILES_2)
.setValue(this.plugin.settings.sample2)
.onChange(async (value) => {
if (value == '') {
value = SAMPLE_SMILES_2;
}
this.plugin.settings.sample2 = value;
await this.plugin.saveSettings();
onSettingsChange();
})
);
const preview = new LivePreview(containerEl, this.plugin.settings);
new Setting(containerEl)
.setName(i18n.t('settings.advanced.title'))
.setHeading();
new Setting(containerEl)
.setName(i18n.t('settings.advanced.compact-drawing.name'))
.setDesc(i18n.t('settings.advanced.compact-drawing.description'))
.addToggle((toggle) =>
toggle
.setValue(
this.plugin.settings.options?.compactDrawing ?? false
)
.onChange(async (value) => {
this.plugin.settings.options.compactDrawing = value;
await this.plugin.saveSettings();
setDrawer({
...DEFAULT_SD_OPTIONS,
...this.plugin.settings.options,
});
onSettingsChange();
})
);
new Setting(containerEl)
.setName(i18n.t('settings.advanced.terminal-carbons.name'))
.setDesc(i18n.t('settings.advanced.terminal-carbons.description'))
.addToggle((toggle) =>
toggle
.setValue(
this.plugin.settings.options?.terminalCarbons ?? false
)
.onChange(async (value) => {
this.plugin.settings.options.terminalCarbons = value;
await this.plugin.saveSettings();
setDrawer({
...DEFAULT_SD_OPTIONS,
...this.plugin.settings.options,
});
onSettingsChange();
})
);
const onSettingsChange = () => {
| preview.updateSettings(this.plugin.settings); |
preview.render();
};
const unifyBondLength = () => {
widthSettings.controlEl.empty();
widthSettings
.setName(i18n.t('settings.unify-bond-length.name'))
.setDesc(i18n.t('settings.unify-bond-length.description'))
.addText((text) =>
text
.setValue(
this.plugin.settings.options.width?.toString() ??
'300'
)
.onChange(async (value) => {
if (value == '') {
value = '300';
}
this.plugin.settings.options.width =
parseInt(value);
this.plugin.settings.options.height =
parseInt(value);
await this.plugin.saveSettings();
setDrawer({
...DEFAULT_SD_OPTIONS,
...this.plugin.settings.options,
});
onSettingsChange();
})
);
};
const unifyImageWidth = () => {
widthSettings.controlEl.empty();
widthSettings
.setName(i18n.t('settings.unify-bond-length.name'))
.setDesc(i18n.t('settings.unify-bond-length.description'))
.addText((text) => {
text.setValue(
this.plugin.settings?.imgWidth.toString() ?? '300'
)
.setPlaceholder('300')
.onChange(async (value) => {
if (value == '') {
value = '300';
}
this.plugin.settings.imgWidth = parseInt(value);
await this.plugin.saveSettings();
onSettingsChange();
});
});
};
// initialize
preview.render();
if ((this.plugin.settings.options?.scale ?? 1) == 0) unifyImageWidth();
else unifyBondLength();
}
hide(): void {
refreshBlocks();
}
}
| src/settings/SettingTab.ts | Acylation-obsidian-chem-54b1d05 | [
{
"filename": "src/main.ts",
"retrieved_chunk": "\tasync saveSettings() {\n\t\tawait this.saveData(this.settings);\n\t}\n\tsmilesProcessor = (\n\t\tsource: string,\n\t\tel: HTMLElement,\n\t\tctx: MarkdownPostProcessorContext\n\t) => {\n\t\tctx.addChild(new SmilesBlock(el, source, ctx, this.settings)); // pass plugin settings, maybe useful in react settings provider.\n\t};",
"score": 21.75188938190649
},
{
"filename": "src/main.ts",
"retrieved_chunk": "\t\tsetObserver();\n\t\tthis.addSettingTab(new ChemSettingTab({ app: this.app, plugin: this }));\n\t\tthis.registerMarkdownCodeBlockProcessor('smiles', this.smilesProcessor);\n\t}\n\tasync onunload() {\n\t\tdetachObserver();\n\t\tclearBlocks();\n\t\tclearDrawer();\n\t}\n\tasync loadSettings() {",
"score": 15.755904556754448
},
{
"filename": "src/main.ts",
"retrieved_chunk": "import { setDrawer, clearDrawer } from './global/drawer';\nimport { setObserver, detachObserver } from './themeObserver';\nexport default class ChemPlugin extends Plugin {\n\tsettings: ChemPluginSettings;\n\tasync onload() {\n\t\tawait this.loadSettings();\n\t\t// this.addRibbonIcon('hexagon', 'This is Chem Plugin', () => {});\n\t\t// initialize global variables\n\t\tsetDrawer(this.settings.options);\n\t\tsetBlocks();",
"score": 13.139152167649453
},
{
"filename": "src/global/drawer.ts",
"retrieved_chunk": "import { DEFAULT_SD_OPTIONS, SMILES_DRAWER_OPTIONS } from 'src/settings/base';\nimport SmilesDrawer from 'smiles-drawer';\nexport let gDrawer = new SmilesDrawer.SmiDrawer(DEFAULT_SD_OPTIONS);\nexport const setDrawer = (options: Partial<SMILES_DRAWER_OPTIONS>) => {\n\tgDrawer = new SmilesDrawer.SmiDrawer({ ...DEFAULT_SD_OPTIONS, ...options });\n};\nexport const clearDrawer = () => {\n\tgDrawer = {};\n};",
"score": 11.557394856999034
},
{
"filename": "src/main.ts",
"retrieved_chunk": "\t\tconst candidate = Object.assign({}, await this.loadData());\n\t\tif ('version' in candidate && candidate.version == SETTINGS_VERSION)\n\t\t\tthis.settings = Object.assign({}, DEFAULT_SETTINGS, candidate);\n\t\telse\n\t\t\tthis.settings = Object.assign(\n\t\t\t\t{},\n\t\t\t\tDEFAULT_SETTINGS,\n\t\t\t\tupdateSettingsVersion(candidate)\n\t\t\t);\n\t}",
"score": 9.708016568280444
}
] | typescript | preview.updateSettings(this.plugin.settings); |
import { Plugin, MarkdownPostProcessorContext } from 'obsidian';
import {
DEFAULT_SETTINGS,
ChemPluginSettings,
SETTINGS_VERSION,
} from './settings/base';
import { ChemSettingTab } from './settings/SettingTab';
import { updateSettingsVersion } from './settings/update';
import { SmilesBlock } from './SmilesBlock';
import { setBlocks, clearBlocks } from './global/blocks';
import { setDrawer, clearDrawer } from './global/drawer';
import { setObserver, detachObserver } from './themeObserver';
export default class ChemPlugin extends Plugin {
settings: ChemPluginSettings;
async onload() {
await this.loadSettings();
// this.addRibbonIcon('hexagon', 'This is Chem Plugin', () => {});
// initialize global variables
setDrawer(this.settings.options);
setBlocks();
setObserver();
this.addSettingTab(new ChemSettingTab({ app: this.app, plugin: this }));
this.registerMarkdownCodeBlockProcessor('smiles', this.smilesProcessor);
}
async onunload() {
detachObserver();
clearBlocks();
clearDrawer();
}
async loadSettings() {
const candidate = Object.assign({}, await this.loadData());
if ('version' in candidate && candidate.version == SETTINGS_VERSION)
this.settings = Object.assign({}, DEFAULT_SETTINGS, candidate);
else
this.settings = Object.assign(
{},
DEFAULT_SETTINGS,
updateSettingsVersion(candidate)
);
}
async saveSettings() {
await this.saveData(this.settings);
}
smilesProcessor = (
source: string,
el: HTMLElement,
ctx: MarkdownPostProcessorContext
) => {
| ctx.addChild(new SmilesBlock(el, source, ctx, this.settings)); // pass plugin settings, maybe useful in react settings provider.
}; |
}
| src/main.ts | Acylation-obsidian-chem-54b1d05 | [
{
"filename": "src/SmilesBlock.ts",
"retrieved_chunk": "import { MarkdownRenderChild, MarkdownPostProcessorContext } from 'obsidian';\nimport { gDrawer } from './global/drawer';\nimport { ChemPluginSettings } from './settings/base';\nimport { addBlock, removeBlock } from './global/blocks';\nimport { i18n } from 'src/lib/i18n';\nexport class SmilesBlock extends MarkdownRenderChild {\n\tconstructor(\n\t\tprivate readonly el: HTMLElement,\n\t\tprivate readonly markdownSource: string,\n\t\tprivate readonly context: MarkdownPostProcessorContext,",
"score": 15.694248704817484
},
{
"filename": "src/settings/LivePreview.ts",
"retrieved_chunk": "\t};\n\tprivate renderCell = (\n\t\tsource: string,\n\t\ttarget: HTMLElement,\n\t\tstyle: string\n\t) => {\n\t\tconst svg = target.createSvg('svg');\n\t\tgDrawer.draw(\n\t\t\tsource,\n\t\t\tsvg,",
"score": 15.075406960970254
},
{
"filename": "src/global/blocks.ts",
"retrieved_chunk": "};\n// TODO: late refresh\n// this manager will get all the living instances\n// however, we only want to see the instances refreshed when the file is activated\n// file info is in the ctx of the block, and activate attribute is in the leaf metadata\nexport const refreshBlocks = () => {\n\tgBlocks.forEach((block) => {\n\t\tblock.render();\n\t});\n};",
"score": 13.143943052828316
},
{
"filename": "src/settings/SettingTab.ts",
"retrieved_chunk": "\t\t\t\t\t.onChange(async (value) => {\n\t\t\t\t\t\tif (value == '') {\n\t\t\t\t\t\t\tvalue = SAMPLE_SMILES_2;\n\t\t\t\t\t\t}\n\t\t\t\t\t\tthis.plugin.settings.sample2 = value;\n\t\t\t\t\t\tawait this.plugin.saveSettings();\n\t\t\t\t\t\tonSettingsChange();\n\t\t\t\t\t})\n\t\t\t);\n\t\tconst preview = new LivePreview(containerEl, this.plugin.settings);",
"score": 12.30402850036659
},
{
"filename": "src/settings/SettingTab.ts",
"retrieved_chunk": "\t\t\t\t\t.setValue(this.plugin.settings.darkTheme)\n\t\t\t\t\t.onChange(async (value) => {\n\t\t\t\t\t\tthis.plugin.settings.darkTheme = value;\n\t\t\t\t\t\tawait this.plugin.saveSettings();\n\t\t\t\t\t\tonSettingsChange();\n\t\t\t\t\t})\n\t\t\t);\n\t\tnew Setting(containerEl)\n\t\t\t.setName(i18n.t('settings.preview.title'))\n\t\t\t.setHeading();",
"score": 11.984846529249692
}
] | typescript | ctx.addChild(new SmilesBlock(el, source, ctx, this.settings)); // pass plugin settings, maybe useful in react settings provider.
}; |
import { Plugin, MarkdownPostProcessorContext } from 'obsidian';
import {
DEFAULT_SETTINGS,
ChemPluginSettings,
SETTINGS_VERSION,
} from './settings/base';
import { ChemSettingTab } from './settings/SettingTab';
import { updateSettingsVersion } from './settings/update';
import { SmilesBlock } from './SmilesBlock';
import { setBlocks, clearBlocks } from './global/blocks';
import { setDrawer, clearDrawer } from './global/drawer';
import { setObserver, detachObserver } from './themeObserver';
export default class ChemPlugin extends Plugin {
settings: ChemPluginSettings;
async onload() {
await this.loadSettings();
// this.addRibbonIcon('hexagon', 'This is Chem Plugin', () => {});
// initialize global variables
setDrawer(this.settings.options);
setBlocks();
setObserver();
this.addSettingTab(new ChemSettingTab({ app: this.app, plugin: this }));
this.registerMarkdownCodeBlockProcessor('smiles', this.smilesProcessor);
}
async onunload() {
detachObserver();
clearBlocks();
clearDrawer();
}
async loadSettings() {
const candidate = Object.assign({}, await this.loadData());
if ('version' in candidate && candidate.version == SETTINGS_VERSION)
this.settings = Object.assign({}, DEFAULT_SETTINGS, candidate);
else
this.settings = Object.assign(
{},
DEFAULT_SETTINGS,
updateSettingsVersion(candidate)
);
}
async saveSettings() {
await this.saveData(this.settings);
}
smilesProcessor = (
source: string,
el: HTMLElement,
ctx: MarkdownPostProcessorContext
) => {
ctx.addChild( | new SmilesBlock(el, source, ctx, this.settings)); // pass plugin settings, maybe useful in react settings provider.
}; |
}
| src/main.ts | Acylation-obsidian-chem-54b1d05 | [
{
"filename": "src/SmilesBlock.ts",
"retrieved_chunk": "import { MarkdownRenderChild, MarkdownPostProcessorContext } from 'obsidian';\nimport { gDrawer } from './global/drawer';\nimport { ChemPluginSettings } from './settings/base';\nimport { addBlock, removeBlock } from './global/blocks';\nimport { i18n } from 'src/lib/i18n';\nexport class SmilesBlock extends MarkdownRenderChild {\n\tconstructor(\n\t\tprivate readonly el: HTMLElement,\n\t\tprivate readonly markdownSource: string,\n\t\tprivate readonly context: MarkdownPostProcessorContext,",
"score": 15.694248704817484
},
{
"filename": "src/settings/LivePreview.ts",
"retrieved_chunk": "\t};\n\tprivate renderCell = (\n\t\tsource: string,\n\t\ttarget: HTMLElement,\n\t\tstyle: string\n\t) => {\n\t\tconst svg = target.createSvg('svg');\n\t\tgDrawer.draw(\n\t\t\tsource,\n\t\t\tsvg,",
"score": 15.075406960970254
},
{
"filename": "src/global/blocks.ts",
"retrieved_chunk": "};\n// TODO: late refresh\n// this manager will get all the living instances\n// however, we only want to see the instances refreshed when the file is activated\n// file info is in the ctx of the block, and activate attribute is in the leaf metadata\nexport const refreshBlocks = () => {\n\tgBlocks.forEach((block) => {\n\t\tblock.render();\n\t});\n};",
"score": 13.143943052828316
},
{
"filename": "src/settings/LivePreview.ts",
"retrieved_chunk": "\t\t\tstyle,\n\t\t\tnull,\n\t\t\t(error: object & { name: string; message: string }) => {\n\t\t\t\ttarget.empty();\n\t\t\t\tconst ErrorContainer = target.createEl('div');\n\t\t\t\tErrorContainer.createDiv('error-source').setText(\n\t\t\t\t\ti18n.t('errors.source.title', { source: source })\n\t\t\t\t);\n\t\t\t\tErrorContainer.createEl('br');\n\t\t\t\tconst ErrorInfo = ErrorContainer.createEl('details');",
"score": 11.733408211021676
},
{
"filename": "src/SmilesBlock.ts",
"retrieved_chunk": "\t\tprivate readonly settings: ChemPluginSettings\n\t) {\n\t\tsuper(el); // important\n\t\taddBlock(this);\n\t}\n\trender() {\n\t\t// TODO: rendering animation\n\t\tthis.el.empty();\n\t\tconst rows = this.markdownSource\n\t\t\t.split('\\n')",
"score": 10.381655862730593
}
] | typescript | new SmilesBlock(el, source, ctx, this.settings)); // pass plugin settings, maybe useful in react settings provider.
}; |
import { App, PluginSettingTab, Setting, SliderComponent } from 'obsidian';
import ChemPlugin from '../main';
import {
DEFAULT_SD_OPTIONS,
SAMPLE_SMILES_1,
SAMPLE_SMILES_2,
themeList,
} from './base';
import { setDrawer } from 'src/global/drawer';
import { refreshBlocks } from 'src/global/blocks';
import { LivePreview } from './LivePreview';
import { i18n } from 'src/lib/i18n';
// Reference: https://smilesdrawer.surge.sh/playground.html
export class ChemSettingTab extends PluginSettingTab {
plugin: ChemPlugin;
constructor({ app, plugin }: { app: App; plugin: ChemPlugin }) {
super(app, plugin);
this.plugin = plugin;
}
display(): void {
const { containerEl } = this;
containerEl.empty();
const scaleSetting = new Setting(containerEl)
.setName(i18n.t('settings.scale.name'))
.setDesc(i18n.t('settings.scale.description'))
.addExtraButton((button) => {
button
.setIcon('rotate-ccw')
.setTooltip(i18n.t('settings.scale.description'))
.onClick(async () => {
this.plugin.settings.options.scale = 1;
scaleSlider.setValue(50);
await this.plugin.saveSettings();
setDrawer({
...DEFAULT_SD_OPTIONS,
...this.plugin.settings.options,
});
onSettingsChange();
unifyBondLength();
});
});
const scaleLabel = scaleSetting.controlEl.createDiv('slider-readout');
scaleLabel.setText(
(this.plugin.settings.options.scale ?? 1.0).toFixed(2).toString()
);
const scaleSlider = new SliderComponent(scaleSetting.controlEl)
.setValue(50 * (this.plugin.settings.options.scale ?? 1.0))
.setLimits(0.0, 100, 0.5)
.onChange(async (value) => {
this.plugin.settings.options.scale = value / 50;
scaleLabel.setText((value / 50).toFixed(2).toString());
await this.plugin.saveSettings();
setDrawer({
...DEFAULT_SD_OPTIONS,
...this.plugin.settings.options,
});
onSettingsChange();
if (value == 0) unifyImageWidth();
else unifyBondLength();
});
const widthSettings = new Setting(containerEl);
new Setting(containerEl)
.setName(i18n.t('settings.theme.light.name'))
.setDesc(i18n.t('settings.theme.light.description'))
.addDropdown((dropdown) =>
dropdown
.addOptions(themeList)
.setValue(this.plugin.settings.lightTheme)
.onChange(async (value) => {
this.plugin.settings.lightTheme = value;
await this.plugin.saveSettings();
onSettingsChange();
})
);
new Setting(containerEl)
.setName(i18n.t('settings.theme.dark.name'))
.setDesc(i18n.t('settings.theme.dark.description'))
.addDropdown((dropdown) =>
dropdown
.addOptions(themeList)
.setValue(this.plugin.settings.darkTheme)
.onChange(async (value) => {
this.plugin.settings.darkTheme = value;
await this.plugin.saveSettings();
onSettingsChange();
})
);
new Setting(containerEl)
.setName(i18n.t('settings.preview.title'))
.setHeading();
new Setting(containerEl)
.setName(i18n.t('settings.preview.sample.name'))
.setDesc(i18n.t('settings.preview.sample.description'))
.addText((text) =>
text
.setPlaceholder(SAMPLE_SMILES_1)
.setValue(this.plugin.settings.sample1)
.onChange(async (value) => {
if (value == '') {
value = SAMPLE_SMILES_1;
}
this.plugin.settings.sample1 = value;
await this.plugin.saveSettings();
onSettingsChange();
})
)
.addText((text) =>
text
.setPlaceholder(SAMPLE_SMILES_2)
.setValue(this.plugin.settings.sample2)
.onChange(async (value) => {
if (value == '') {
value = SAMPLE_SMILES_2;
}
this.plugin.settings.sample2 = value;
await this.plugin.saveSettings();
onSettingsChange();
})
);
| const preview = new LivePreview(containerEl, this.plugin.settings); |
new Setting(containerEl)
.setName(i18n.t('settings.advanced.title'))
.setHeading();
new Setting(containerEl)
.setName(i18n.t('settings.advanced.compact-drawing.name'))
.setDesc(i18n.t('settings.advanced.compact-drawing.description'))
.addToggle((toggle) =>
toggle
.setValue(
this.plugin.settings.options?.compactDrawing ?? false
)
.onChange(async (value) => {
this.plugin.settings.options.compactDrawing = value;
await this.plugin.saveSettings();
setDrawer({
...DEFAULT_SD_OPTIONS,
...this.plugin.settings.options,
});
onSettingsChange();
})
);
new Setting(containerEl)
.setName(i18n.t('settings.advanced.terminal-carbons.name'))
.setDesc(i18n.t('settings.advanced.terminal-carbons.description'))
.addToggle((toggle) =>
toggle
.setValue(
this.plugin.settings.options?.terminalCarbons ?? false
)
.onChange(async (value) => {
this.plugin.settings.options.terminalCarbons = value;
await this.plugin.saveSettings();
setDrawer({
...DEFAULT_SD_OPTIONS,
...this.plugin.settings.options,
});
onSettingsChange();
})
);
const onSettingsChange = () => {
preview.updateSettings(this.plugin.settings);
preview.render();
};
const unifyBondLength = () => {
widthSettings.controlEl.empty();
widthSettings
.setName(i18n.t('settings.unify-bond-length.name'))
.setDesc(i18n.t('settings.unify-bond-length.description'))
.addText((text) =>
text
.setValue(
this.plugin.settings.options.width?.toString() ??
'300'
)
.onChange(async (value) => {
if (value == '') {
value = '300';
}
this.plugin.settings.options.width =
parseInt(value);
this.plugin.settings.options.height =
parseInt(value);
await this.plugin.saveSettings();
setDrawer({
...DEFAULT_SD_OPTIONS,
...this.plugin.settings.options,
});
onSettingsChange();
})
);
};
const unifyImageWidth = () => {
widthSettings.controlEl.empty();
widthSettings
.setName(i18n.t('settings.unify-bond-length.name'))
.setDesc(i18n.t('settings.unify-bond-length.description'))
.addText((text) => {
text.setValue(
this.plugin.settings?.imgWidth.toString() ?? '300'
)
.setPlaceholder('300')
.onChange(async (value) => {
if (value == '') {
value = '300';
}
this.plugin.settings.imgWidth = parseInt(value);
await this.plugin.saveSettings();
onSettingsChange();
});
});
};
// initialize
preview.render();
if ((this.plugin.settings.options?.scale ?? 1) == 0) unifyImageWidth();
else unifyBondLength();
}
hide(): void {
refreshBlocks();
}
}
| src/settings/SettingTab.ts | Acylation-obsidian-chem-54b1d05 | [
{
"filename": "src/main.ts",
"retrieved_chunk": "\tasync saveSettings() {\n\t\tawait this.saveData(this.settings);\n\t}\n\tsmilesProcessor = (\n\t\tsource: string,\n\t\tel: HTMLElement,\n\t\tctx: MarkdownPostProcessorContext\n\t) => {\n\t\tctx.addChild(new SmilesBlock(el, source, ctx, this.settings)); // pass plugin settings, maybe useful in react settings provider.\n\t};",
"score": 26.415338213573087
},
{
"filename": "src/main.ts",
"retrieved_chunk": "\t\tsetObserver();\n\t\tthis.addSettingTab(new ChemSettingTab({ app: this.app, plugin: this }));\n\t\tthis.registerMarkdownCodeBlockProcessor('smiles', this.smilesProcessor);\n\t}\n\tasync onunload() {\n\t\tdetachObserver();\n\t\tclearBlocks();\n\t\tclearDrawer();\n\t}\n\tasync loadSettings() {",
"score": 22.419298662863103
},
{
"filename": "src/themeObserver.ts",
"retrieved_chunk": "\t\t\trefreshBlocks();\n\t\t} else if (\n\t\t\t// light -> empty -> dark\n\t\t\tmutation.oldValue?.contains('theme-light') && // key line, avoid calling twice\n\t\t\t!mutation.oldValue?.contains('theme-dark') &&\n\t\t\ttarget.classList.value.contains('theme-dark')\n\t\t) {\n\t\t\trefreshBlocks();\n\t\t}\n\t});",
"score": 14.242955211792296
},
{
"filename": "src/themeObserver.ts",
"retrieved_chunk": "import { refreshBlocks } from 'src/global/blocks';\nexport const themeObserver = new MutationObserver(function (mutations) {\n\tmutations.forEach(function (mutation) {\n\t\tconst target = mutation.target as HTMLElement;\n\t\tif (\n\t\t\t// dark -> dark & light -> light\n\t\t\tmutation.oldValue?.contains('theme-dark') &&\n\t\t\t!mutation.oldValue?.contains('theme-light') && // key line, avoid calling twice\n\t\t\ttarget.classList.value.contains('theme-light')\n\t\t) {",
"score": 12.996740009845576
},
{
"filename": "src/settings/LivePreview.ts",
"retrieved_chunk": "\t\t);\n\t\tthis.darkCard.empty();\n\t\tconst darkWidth = this.renderCell(\n\t\t\tthis.settings.sample2,\n\t\t\tthis.darkCard,\n\t\t\tthis.settings.darkTheme\n\t\t);\n\t\tif (this.settings.options.scale == 0)\n\t\t\tthis.container.style.gridTemplateColumns = `repeat(auto-fill, minmax(${\n\t\t\t\tthis.settings?.imgWidth.toString() ?? '300'",
"score": 11.78166303583133
}
] | typescript | const preview = new LivePreview(containerEl, this.plugin.settings); |
import { App, PluginSettingTab, Setting, SliderComponent } from 'obsidian';
import ChemPlugin from '../main';
import {
DEFAULT_SD_OPTIONS,
SAMPLE_SMILES_1,
SAMPLE_SMILES_2,
themeList,
} from './base';
import { setDrawer } from 'src/global/drawer';
import { refreshBlocks } from 'src/global/blocks';
import { LivePreview } from './LivePreview';
import { i18n } from 'src/lib/i18n';
// Reference: https://smilesdrawer.surge.sh/playground.html
export class ChemSettingTab extends PluginSettingTab {
plugin: ChemPlugin;
constructor({ app, plugin }: { app: App; plugin: ChemPlugin }) {
super(app, plugin);
this.plugin = plugin;
}
display(): void {
const { containerEl } = this;
containerEl.empty();
const scaleSetting = new Setting(containerEl)
.setName(i18n.t('settings.scale.name'))
.setDesc(i18n.t('settings.scale.description'))
.addExtraButton((button) => {
button
.setIcon('rotate-ccw')
.setTooltip(i18n.t('settings.scale.description'))
.onClick(async () => {
this.plugin.settings.options.scale = 1;
scaleSlider.setValue(50);
await this.plugin.saveSettings();
setDrawer({
...DEFAULT_SD_OPTIONS,
...this.plugin.settings.options,
});
onSettingsChange();
unifyBondLength();
});
});
const scaleLabel = scaleSetting.controlEl.createDiv('slider-readout');
scaleLabel.setText(
(this.plugin.settings.options.scale ?? 1.0).toFixed(2).toString()
);
const scaleSlider = new SliderComponent(scaleSetting.controlEl)
.setValue(50 * (this.plugin.settings.options.scale ?? 1.0))
.setLimits(0.0, 100, 0.5)
.onChange(async (value) => {
this.plugin.settings.options.scale = value / 50;
scaleLabel.setText((value / 50).toFixed(2).toString());
await this.plugin.saveSettings();
setDrawer({
...DEFAULT_SD_OPTIONS,
...this.plugin.settings.options,
});
onSettingsChange();
if (value == 0) unifyImageWidth();
else unifyBondLength();
});
const widthSettings = new Setting(containerEl);
new Setting(containerEl)
.setName(i18n.t('settings.theme.light.name'))
.setDesc(i18n.t('settings.theme.light.description'))
.addDropdown((dropdown) =>
dropdown
.addOptions(themeList)
.setValue(this.plugin.settings.lightTheme)
.onChange(async (value) => {
this.plugin.settings.lightTheme = value;
await this.plugin.saveSettings();
onSettingsChange();
})
);
new Setting(containerEl)
.setName(i18n.t('settings.theme.dark.name'))
.setDesc(i18n.t('settings.theme.dark.description'))
.addDropdown((dropdown) =>
dropdown
.addOptions(themeList)
.setValue(this.plugin.settings.darkTheme)
.onChange(async (value) => {
this.plugin.settings.darkTheme = value;
await this.plugin.saveSettings();
onSettingsChange();
})
);
new Setting(containerEl)
.setName(i18n.t('settings.preview.title'))
.setHeading();
new Setting(containerEl)
.setName(i18n.t('settings.preview.sample.name'))
.setDesc(i18n.t('settings.preview.sample.description'))
.addText((text) =>
text
.setPlaceholder(SAMPLE_SMILES_1)
.setValue(this.plugin.settings.sample1)
.onChange(async (value) => {
if (value == '') {
value = SAMPLE_SMILES_1;
}
this.plugin.settings.sample1 = value;
await this.plugin.saveSettings();
onSettingsChange();
})
)
.addText((text) =>
text
.setPlaceholder(SAMPLE_SMILES_2)
.setValue(this.plugin.settings.sample2)
.onChange(async (value) => {
if (value == '') {
value = SAMPLE_SMILES_2;
}
this.plugin.settings.sample2 = value;
await this.plugin.saveSettings();
onSettingsChange();
})
);
const preview = new LivePreview(containerEl, this.plugin.settings);
new Setting(containerEl)
.setName(i18n.t('settings.advanced.title'))
.setHeading();
new Setting(containerEl)
.setName(i18n.t('settings.advanced.compact-drawing.name'))
.setDesc(i18n.t('settings.advanced.compact-drawing.description'))
.addToggle((toggle) =>
toggle
.setValue(
this.plugin.settings.options?.compactDrawing ?? false
)
.onChange(async (value) => {
this.plugin.settings.options.compactDrawing = value;
await this.plugin.saveSettings();
setDrawer({
...DEFAULT_SD_OPTIONS,
...this.plugin.settings.options,
});
onSettingsChange();
})
);
new Setting(containerEl)
.setName(i18n.t('settings.advanced.terminal-carbons.name'))
.setDesc(i18n.t('settings.advanced.terminal-carbons.description'))
.addToggle((toggle) =>
toggle
.setValue(
this.plugin.settings.options?.terminalCarbons ?? false
)
.onChange(async (value) => {
this.plugin.settings.options.terminalCarbons = value;
await this.plugin.saveSettings();
setDrawer({
...DEFAULT_SD_OPTIONS,
...this.plugin.settings.options,
});
onSettingsChange();
})
);
const onSettingsChange = () => {
preview.updateSettings(this.plugin.settings);
| preview.render(); |
};
const unifyBondLength = () => {
widthSettings.controlEl.empty();
widthSettings
.setName(i18n.t('settings.unify-bond-length.name'))
.setDesc(i18n.t('settings.unify-bond-length.description'))
.addText((text) =>
text
.setValue(
this.plugin.settings.options.width?.toString() ??
'300'
)
.onChange(async (value) => {
if (value == '') {
value = '300';
}
this.plugin.settings.options.width =
parseInt(value);
this.plugin.settings.options.height =
parseInt(value);
await this.plugin.saveSettings();
setDrawer({
...DEFAULT_SD_OPTIONS,
...this.plugin.settings.options,
});
onSettingsChange();
})
);
};
const unifyImageWidth = () => {
widthSettings.controlEl.empty();
widthSettings
.setName(i18n.t('settings.unify-bond-length.name'))
.setDesc(i18n.t('settings.unify-bond-length.description'))
.addText((text) => {
text.setValue(
this.plugin.settings?.imgWidth.toString() ?? '300'
)
.setPlaceholder('300')
.onChange(async (value) => {
if (value == '') {
value = '300';
}
this.plugin.settings.imgWidth = parseInt(value);
await this.plugin.saveSettings();
onSettingsChange();
});
});
};
// initialize
preview.render();
if ((this.plugin.settings.options?.scale ?? 1) == 0) unifyImageWidth();
else unifyBondLength();
}
hide(): void {
refreshBlocks();
}
}
| src/settings/SettingTab.ts | Acylation-obsidian-chem-54b1d05 | [
{
"filename": "src/global/drawer.ts",
"retrieved_chunk": "import { DEFAULT_SD_OPTIONS, SMILES_DRAWER_OPTIONS } from 'src/settings/base';\nimport SmilesDrawer from 'smiles-drawer';\nexport let gDrawer = new SmilesDrawer.SmiDrawer(DEFAULT_SD_OPTIONS);\nexport const setDrawer = (options: Partial<SMILES_DRAWER_OPTIONS>) => {\n\tgDrawer = new SmilesDrawer.SmiDrawer({ ...DEFAULT_SD_OPTIONS, ...options });\n};\nexport const clearDrawer = () => {\n\tgDrawer = {};\n};",
"score": 11.557394856999034
},
{
"filename": "src/main.ts",
"retrieved_chunk": "\tasync saveSettings() {\n\t\tawait this.saveData(this.settings);\n\t}\n\tsmilesProcessor = (\n\t\tsource: string,\n\t\tel: HTMLElement,\n\t\tctx: MarkdownPostProcessorContext\n\t) => {\n\t\tctx.addChild(new SmilesBlock(el, source, ctx, this.settings)); // pass plugin settings, maybe useful in react settings provider.\n\t};",
"score": 11.245464302772167
},
{
"filename": "src/main.ts",
"retrieved_chunk": "\t\tsetObserver();\n\t\tthis.addSettingTab(new ChemSettingTab({ app: this.app, plugin: this }));\n\t\tthis.registerMarkdownCodeBlockProcessor('smiles', this.smilesProcessor);\n\t}\n\tasync onunload() {\n\t\tdetachObserver();\n\t\tclearBlocks();\n\t\tclearDrawer();\n\t}\n\tasync loadSettings() {",
"score": 10.503936371169631
},
{
"filename": "src/settings/LivePreview.ts",
"retrieved_chunk": "\t\t\tcls: 'chemcard theme-dark',\n\t\t});\n\t\tthis.settings = this.argSettings;\n\t}\n\trender = () => {\n\t\tthis.lightCard.empty();\n\t\tconst lightWidth = this.renderCell(\n\t\t\tthis.settings.sample1,\n\t\t\tthis.lightCard,\n\t\t\tthis.settings.lightTheme",
"score": 9.463270302746974
},
{
"filename": "src/main.ts",
"retrieved_chunk": "import { setDrawer, clearDrawer } from './global/drawer';\nimport { setObserver, detachObserver } from './themeObserver';\nexport default class ChemPlugin extends Plugin {\n\tsettings: ChemPluginSettings;\n\tasync onload() {\n\t\tawait this.loadSettings();\n\t\t// this.addRibbonIcon('hexagon', 'This is Chem Plugin', () => {});\n\t\t// initialize global variables\n\t\tsetDrawer(this.settings.options);\n\t\tsetBlocks();",
"score": 9.445722021264693
}
] | typescript | preview.render(); |
import { createExportTraceServiceRequest } from '@opentelemetry/otlp-transformer'
import { ExportServiceError, OTLPExporterError } from '@opentelemetry/otlp-exporter-base'
import { ExportResult, ExportResultCode } from '@opentelemetry/core'
import { SpanExporter } from '@opentelemetry/sdk-trace-base'
import { unwrap } from './wrap.js'
export interface OTLPExporterConfig {
url: string
headers?: Record<string, string>
}
const defaultHeaders: Record<string, string> = {
accept: 'application/json',
'content-type': 'application/json',
}
export class OTLPExporter implements SpanExporter {
private headers: Record<string, string>
private url: string
constructor(config: OTLPExporterConfig) {
this.url = config.url
this.headers = Object.assign({}, defaultHeaders, config.headers)
}
export(items: any[], resultCallback: (result: ExportResult) => void): void {
this._export(items)
.then(() => {
resultCallback({ code: ExportResultCode.SUCCESS })
})
.catch((error: ExportServiceError) => {
resultCallback({ code: ExportResultCode.FAILED, error })
})
}
private _export(items: any[]): Promise<unknown> {
return new Promise<void>((resolve, reject) => {
try {
this.send(items, resolve, reject)
} catch (e) {
reject(e)
}
})
}
send(items: any[], onSuccess: () => void, onError: (error: OTLPExporterError) => void): void {
const exportMessage = createExportTraceServiceRequest(items, true)
const body = JSON.stringify(exportMessage)
const params: RequestInit = {
method: 'POST',
headers: this.headers,
body,
}
unwrap(fetch)(this.url, params)
.then( | (response) => { |
if (response.ok) {
onSuccess()
} else {
onError(new OTLPExporterError(`Exporter received a statusCode: ${response.status}`))
}
})
.catch((error) => {
onError(new OTLPExporterError(`Exception during export: ${error.toString()}`, error.code, error.stack))
})
}
async shutdown(): Promise<void> {}
}
| src/exporter.ts | evanderkoogh-otel-cf-workers-18fd741 | [
{
"filename": "src/instrumentation/fetch.ts",
"retrieved_chunk": "\tconst headers = request.headers\n\t// attrs[SemanticAttributes.HTTP_CLIENT_IP] = '1.1.1.1'\n\tattrs[SemanticAttributes.HTTP_METHOD] = request.method\n\tattrs[SemanticAttributes.HTTP_URL] = sanitiseURL(request.url)\n\tattrs[SemanticAttributes.HTTP_USER_AGENT] = headers.get('user-agent')!\n\tattrs[SemanticAttributes.HTTP_REQUEST_CONTENT_LENGTH] = headers.get('content-length')!\n\tattrs['http.request_content-type'] = headers.get('content-type')!\n\tattrs['http.accepts'] = headers.get('accepts')!\n\treturn attrs\n}",
"score": 19.449067872864696
},
{
"filename": "src/instrumentation/fetch.ts",
"retrieved_chunk": "\t\t\tconst options: SpanOptions = { kind: SpanKind.CLIENT, attributes: attrs }\n\t\t\tconst host = new URL(request.url).host\n\t\t\tconst spanName = typeof attrs?.['name'] === 'string' ? attrs?.['name'] : `fetch: ${host}`\n\t\t\tconst promise = tracer.startActiveSpan(spanName, options, async (span) => {\n\t\t\t\tconst includeTraceContext =\n\t\t\t\t\ttypeof config.includeTraceContext === 'function'\n\t\t\t\t\t\t? config.includeTraceContext(request)\n\t\t\t\t\t\t: config.includeTraceContext\n\t\t\t\tif (includeTraceContext ?? true) {\n\t\t\t\t\tpropagation.inject(api_context.active(), request.headers, {",
"score": 16.964707380419856
},
{
"filename": "src/instrumentation/fetch.ts",
"retrieved_chunk": "export function gatherResponseAttributes(response: Response): Attributes {\n\tconst attrs: Record<string, string | number> = {}\n\tattrs[SemanticAttributes.HTTP_STATUS_CODE] = response.status\n\tattrs[SemanticAttributes.HTTP_RESPONSE_CONTENT_LENGTH] = response.headers.get('content-length')!\n\tattrs['http.response_content-type'] = response.headers.get('content-type')!\n\treturn attrs\n}\nexport function gatherIncomingCfAttributes(request: Request): Attributes {\n\tconst attrs: Record<string, string | number> = {}\n\tattrs[SemanticAttributes.HTTP_SCHEME] = request.cf?.httpProtocol as string",
"score": 16.740546258380526
},
{
"filename": "src/instrumentation/fetch.ts",
"retrieved_chunk": "\tconst workerConfig = getActiveConfig()\n\tconst acceptTraceContext =\n\t\ttypeof workerConfig.handlers.fetch.acceptTraceContext === 'function'\n\t\t\t? workerConfig.handlers.fetch.acceptTraceContext(request)\n\t\t\t: workerConfig.handlers.fetch.acceptTraceContext ?? true\n\treturn acceptTraceContext ? getParentContextFromHeaders(request.headers) : api_context.active()\n}\nexport function waitUntilTrace(fn: () => Promise<any>): Promise<void> {\n\tconst tracer = trace.getTracer('waitUntil')\n\treturn tracer.startActiveSpan('waitUntil', async (span) => {",
"score": 15.333137112009299
},
{
"filename": "src/instrumentation/fetch.ts",
"retrieved_chunk": "\t\tif (typeof value === 'string' || typeof value === 'number') {\n\t\t\tattrs[`cf.${key}`] = value\n\t\t} else {\n\t\t\tattrs[`cf.${key}`] = JSON.stringify(value)\n\t\t}\n\t})\n\treturn attrs\n}\nexport function gatherRequestAttributes(request: Request): Attributes {\n\tconst attrs: Record<string, string | number> = {}",
"score": 13.188304939975207
}
] | typescript | (response) => { |
import { createExportTraceServiceRequest } from '@opentelemetry/otlp-transformer'
import { ExportServiceError, OTLPExporterError } from '@opentelemetry/otlp-exporter-base'
import { ExportResult, ExportResultCode } from '@opentelemetry/core'
import { SpanExporter } from '@opentelemetry/sdk-trace-base'
import { unwrap } from './wrap.js'
export interface OTLPExporterConfig {
url: string
headers?: Record<string, string>
}
const defaultHeaders: Record<string, string> = {
accept: 'application/json',
'content-type': 'application/json',
}
export class OTLPExporter implements SpanExporter {
private headers: Record<string, string>
private url: string
constructor(config: OTLPExporterConfig) {
this.url = config.url
this.headers = Object.assign({}, defaultHeaders, config.headers)
}
export(items: any[], resultCallback: (result: ExportResult) => void): void {
this._export(items)
.then(() => {
resultCallback({ code: ExportResultCode.SUCCESS })
})
.catch((error: ExportServiceError) => {
resultCallback({ code: ExportResultCode.FAILED, error })
})
}
private _export(items: any[]): Promise<unknown> {
return new Promise<void>((resolve, reject) => {
try {
this.send(items, resolve, reject)
} catch (e) {
reject(e)
}
})
}
send(items: any[], onSuccess: () => void, onError: (error: OTLPExporterError) => void): void {
const exportMessage = createExportTraceServiceRequest(items, true)
const body = JSON.stringify(exportMessage)
const params: RequestInit = {
method: 'POST',
headers: this.headers,
body,
}
unwrap(fetch)(this.url, params)
.then((response) => {
if (response.ok) {
onSuccess()
} else {
onError(new OTLPExporterError(`Exporter received a statusCode: ${response.status}`))
}
})
.catch( | (error) => { |
onError(new OTLPExporterError(`Exception during export: ${error.toString()}`, error.code, error.stack))
})
}
async shutdown(): Promise<void> {}
}
| src/exporter.ts | evanderkoogh-otel-cf-workers-18fd741 | [
{
"filename": "src/instrumentation/fetch.ts",
"retrieved_chunk": "\tconst promise = tracer.startActiveSpan('fetchHandler', options, spanContext, async (span) => {\n\t\ttry {\n\t\t\tconst response: Response = await fetchFn(request, env, ctx)\n\t\t\tif (response.status < 500) {\n\t\t\t\tspan.setStatus({ code: SpanStatusCode.OK })\n\t\t\t}\n\t\t\tspan.setAttributes(gatherResponseAttributes(response))\n\t\t\tspan.end()\n\t\t\treturn response\n\t\t} catch (error) {",
"score": 29.69708675955026
},
{
"filename": "src/instrumentation/do.ts",
"retrieved_chunk": "\tconst name = id.name || ''\n\tconst promise = tracer.startActiveSpan(`do.fetchHandler:${name}`, options, spanContext, async (span) => {\n\t\ttry {\n\t\t\tconst response: Response = await fetchFn(request)\n\t\t\tif (response.ok) {\n\t\t\t\tspan.setStatus({ code: SpanStatusCode.OK })\n\t\t\t}\n\t\t\tspan.setAttributes(gatherResponseAttributes(response))\n\t\t\tspan.end()\n\t\t\treturn response",
"score": 25.29098849766487
},
{
"filename": "src/instrumentation/fetch.ts",
"retrieved_chunk": "export function gatherResponseAttributes(response: Response): Attributes {\n\tconst attrs: Record<string, string | number> = {}\n\tattrs[SemanticAttributes.HTTP_STATUS_CODE] = response.status\n\tattrs[SemanticAttributes.HTTP_RESPONSE_CONTENT_LENGTH] = response.headers.get('content-length')!\n\tattrs['http.response_content-type'] = response.headers.get('content-type')!\n\treturn attrs\n}\nexport function gatherIncomingCfAttributes(request: Request): Attributes {\n\tconst attrs: Record<string, string | number> = {}\n\tattrs[SemanticAttributes.HTTP_SCHEME] = request.cf?.httpProtocol as string",
"score": 20.85075702271674
},
{
"filename": "src/instrumentation/fetch.ts",
"retrieved_chunk": "\t\t\t\t\t\tset: (h, k, v) => h.set(k, typeof v === 'string' ? v : String(v)),\n\t\t\t\t\t})\n\t\t\t\t}\n\t\t\t\tspan.setAttributes(gatherRequestAttributes(request))\n\t\t\t\tif (request.cf) span.setAttributes(gatherOutgoingCfAttributes(request.cf))\n\t\t\t\tconst response: Response = await Reflect.apply(target, thisArg, [request])\n\t\t\t\tspan.setAttributes(gatherResponseAttributes(response))\n\t\t\t\tspan.end()\n\t\t\t\treturn response\n\t\t\t})",
"score": 18.365124503417718
},
{
"filename": "src/spanprocessor.ts",
"retrieved_chunk": "\t\t\tthis.inprogressExports.set(traceId, promise)\n\t\t\tpromise.then((result) => {\n\t\t\t\tif (result.code === ExportResultCode.FAILED) {\n\t\t\t\t\tconsole.log('Error sending spans to exporter:', result.error)\n\t\t\t\t}\n\t\t\t\tthis.action(traceId, { actionName: 'exportDone' })\n\t\t\t\tthis.inprogressExports.delete(traceId)\n\t\t\t})\n\t\t}\n\t}",
"score": 9.92182888378995
}
] | typescript | (error) => { |
import { trace } from '@opentelemetry/api'
import { WorkerTracer } from '../tracer.js'
import { passthroughGet, wrap } from '../wrap.js'
type ContextAndTracker = { ctx: ExecutionContext; tracker: PromiseTracker }
type WaitUntilFn = ExecutionContext['waitUntil']
export class PromiseTracker {
_outstandingPromises: Promise<unknown>[] = []
get outstandingPromiseCount() {
return this._outstandingPromises.length
}
track(promise: Promise<unknown>): void {
this._outstandingPromises.push(promise)
}
async wait() {
await allSettledMutable(this._outstandingPromises)
}
}
function createWaitUntil(fn: WaitUntilFn, context: ExecutionContext, tracker: PromiseTracker): WaitUntilFn {
const handler: ProxyHandler<WaitUntilFn> = {
apply(target, thisArg, argArray) {
tracker.track(argArray[0])
return Reflect.apply(target, context, argArray)
},
}
return wrap(fn, handler)
}
export function proxyExecutionContext(context: ExecutionContext): ContextAndTracker {
const tracker = new PromiseTracker()
const ctx = new Proxy(context, {
get(target, prop) {
if (prop === 'waitUntil') {
const fn = Reflect.get(target, prop)
return createWaitUntil(fn, context, tracker)
} else {
return passthroughGet(target, prop)
}
},
})
return { ctx, tracker }
}
export async function exportSpans(tracker?: PromiseTracker) {
const tracer = trace.getTracer('export')
if (tracer instanceof WorkerTracer) {
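		// wait 1 ms before flushing (presumably so spans that are still ending in this tick are recorded first)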
await scheduler.wait(1)
if (tracker) {
await tracker.wait()
}
| await tracer.spanProcessor.forceFlush()
} else { |
console.error('The global tracer is not of type WorkerTracer and can not export spans')
}
}
/** Like `Promise.allSettled`, but handles modifications to the promises array */
export async function allSettledMutable(promises: Promise<unknown>[]): Promise<PromiseSettledResult<unknown>[]> {
let values: PromiseSettledResult<unknown>[]
// when the length of the array changes, there has been a nested call to waitUntil
// and we should await the promises again
do {
values = await Promise.allSettled(promises)
} while (values.length !== promises.length)
return values
}
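// Illustrative sketch, not part of the original file: a nested waitUntil-style case that
// the re-await loop above is designed to handle. The delays and names are assumptions
// made up for the example.
export async function exampleNestedTracking(tracker: PromiseTracker): Promise<void> {
	tracker.track(
		(async () => {
			await scheduler.wait(5) // by now wait() below is already awaiting the first batch
			tracker.track(scheduler.wait(10)) // nested promise added mid-wait
		})()
	)
	// the re-await loop in allSettledMutable ensures this also covers the nested promise
	await tracker.wait()
}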
| src/instrumentation/common.ts | evanderkoogh-otel-cf-workers-18fd741 | [
{
"filename": "src/instrumentation/queue.ts",
"retrieved_chunk": "\t\t\t} finally {\n\t\t\t\torig_ctx.waitUntil(exportSpans(tracker))\n\t\t\t}\n\t\t},\n\t}\n\treturn wrap(queueFn, queueHandler)\n}\nfunction instrumentQueueSend(fn: Queue<unknown>['send'], name: string): Queue<unknown>['send'] {\n\tconst tracer = trace.getTracer('queueSender')\n\tconst handler: ProxyHandler<Queue<unknown>['send']> = {",
"score": 28.62941617029651
},
{
"filename": "src/instrumentation/do.ts",
"retrieved_chunk": "\tconst tracer = trace.getTracer('DO alarmHandler')\n\tconst name = id.name || ''\n\tconst promise = tracer.startActiveSpan(`do.alarmHandler:${name}`, async (span) => {\n\t\tspan.setAttribute(SemanticAttributes.FAAS_COLDSTART, cold_start)\n\t\tcold_start = false\n\t\tspan.setAttribute('do.id', id.toString())\n\t\tif (id.name) span.setAttribute('do.name', id.name)\n\t\ttry {\n\t\t\tawait alarmFn()\n\t\t\tspan.end()",
"score": 22.997911000476016
},
{
"filename": "src/instrumentation/fetch.ts",
"retrieved_chunk": "\t\tawait fn()\n\t\tspan.end()\n\t})\n}\nlet cold_start = true\nexport function executeFetchHandler(fetchFn: FetchHandler, [request, env, ctx]: FetchHandlerArgs): Promise<Response> {\n\tconst spanContext = getParentContextFromRequest(request)\n\tconst tracer = trace.getTracer('fetchHandler')\n\tconst attributes = {\n\t\t[SemanticAttributes.FAAS_TRIGGER]: 'http',",
"score": 22.897693944417703
},
{
"filename": "src/instrumentation/queue.ts",
"retrieved_chunk": "function instrumentQueueSendBatch(fn: Queue<unknown>['sendBatch'], name: string): Queue<unknown>['sendBatch'] {\n\tconst tracer = trace.getTracer('queueSender')\n\tconst handler: ProxyHandler<Queue<unknown>['sendBatch']> = {\n\t\tapply: (target, thisArg, argArray) => {\n\t\t\treturn tracer.startActiveSpan(`queueSendBatch: ${name}`, async (span) => {\n\t\t\t\tspan.setAttribute('queue.operation', 'sendBatch')\n\t\t\t\tawait Reflect.apply(target, unwrap(thisArg), argArray)\n\t\t\t\tspan.end()\n\t\t\t})\n\t\t},",
"score": 21.872940095902855
},
{
"filename": "src/instrumentation/fetch.ts",
"retrieved_chunk": "\t\t\t\tthrow error\n\t\t\t} finally {\n\t\t\t\torig_ctx.waitUntil(exportSpans(tracker))\n\t\t\t}\n\t\t},\n\t}\n\treturn wrap(fetchFn, fetchHandler)\n}\ntype getFetchConfig = (config: ResolvedTraceConfig) => FetcherConfig\nexport function instrumentFetcher(",
"score": 21.159521293683444
}
] | typescript | await tracer.spanProcessor.forceFlush()
} else { |
import { context as api_context, trace, SpanOptions, SpanKind, Exception, SpanStatusCode } from '@opentelemetry/api'
import { SemanticAttributes } from '@opentelemetry/semantic-conventions'
import { passthroughGet, unwrap, wrap } from '../wrap.js'
import {
getParentContextFromHeaders,
gatherIncomingCfAttributes,
gatherRequestAttributes,
gatherResponseAttributes,
instrumentFetcher,
} from './fetch.js'
import { instrumentEnv } from './env.js'
import { Initialiser, setConfig } from '../config.js'
import { exportSpans } from './common.js'
import { instrumentStorage } from './do-storage.js'
import { DOConstructorTrigger } from '../types.js'
type FetchFn = DurableObject['fetch']
type AlarmFn = DurableObject['alarm']
type Env = Record<string, unknown>
function instrumentBindingStub(stub: DurableObjectStub, nsName: string): DurableObjectStub {
const stubHandler: ProxyHandler<typeof stub> = {
get(target, prop) {
if (prop === 'fetch') {
const fetcher = Reflect.get(target, prop)
const attrs = {
name: `durable_object:${nsName}`,
'do.namespace': nsName,
'do.id': target.id.toString(),
'do.id.name': target.id.name,
}
return instrumentFetcher(fetcher, () => ({ includeTraceContext: true }), attrs)
} else {
return passthroughGet(target, prop)
}
},
}
return wrap(stub, stubHandler)
}
function instrumentBindingGet(getFn: DurableObjectNamespace['get'], nsName: string): DurableObjectNamespace['get'] {
const getHandler: ProxyHandler<DurableObjectNamespace['get']> = {
apply(target, thisArg, argArray) {
const stub: DurableObjectStub = Reflect.apply(target, thisArg, argArray)
return instrumentBindingStub(stub, nsName)
},
}
return wrap(getFn, getHandler)
}
export function instrumentDOBinding(ns: DurableObjectNamespace, nsName: string) {
const nsHandler: ProxyHandler<typeof ns> = {
get(target, prop) {
if (prop === 'get') {
const fn = Reflect.get(ns, prop)
return instrumentBindingGet(fn, nsName)
} else {
return passthroughGet(target, prop)
}
},
}
return wrap(ns, nsHandler)
}
export function instrumentState(state: DurableObjectState) {
const stateHandler: ProxyHandler<DurableObjectState> = {
get(target, prop, receiver) {
const result = Reflect.get(target, prop, unwrap(receiver))
if (prop === 'storage') {
return instrumentStorage(result)
} else if (typeof result === 'function') {
return result.bind(target)
} else {
return result
}
},
}
return wrap(state, stateHandler)
}
let cold_start = true
export type DOClass = { new (state: DurableObjectState, env: any): DurableObject }
export function executeDOFetch(fetchFn: FetchFn, request: Request, id: DurableObjectId): Promise<Response> {
const spanContext = getParentContextFromHeaders(request.headers)
const tracer = trace.getTracer('DO fetchHandler')
const attributes = {
[SemanticAttributes.FAAS_TRIGGER]: 'http',
[SemanticAttributes.FAAS_COLDSTART]: cold_start,
}
cold_start = false
Object.assign(attributes, gatherRequestAttributes(request))
Object.assign(attributes, gatherIncomingCfAttributes(request))
const options: SpanOptions = {
attributes,
kind: SpanKind.SERVER,
}
const name = id.name || ''
const promise = tracer.startActiveSpan(`do.fetchHandler:${name}`, options, spanContext, async (span) => {
try {
const response: Response = await fetchFn(request)
if (response.ok) {
span.setStatus({ code: SpanStatusCode.OK })
}
span.setAttributes(gatherResponseAttributes(response))
span.end()
return response
} catch (error) {
span.recordException(error as Exception)
span.setStatus({ code: SpanStatusCode.ERROR })
span.end()
throw error
}
})
return promise
}
export function executeDOAlarm(alarmFn: NonNullable<AlarmFn>, id: DurableObjectId): Promise<void> {
const tracer = trace.getTracer('DO alarmHandler')
const name = id.name || ''
const promise = tracer.startActiveSpan(`do.alarmHandler:${name}`, async (span) => {
span.setAttribute(SemanticAttributes.FAAS_COLDSTART, cold_start)
cold_start = false
span.setAttribute('do.id', id.toString())
if (id.name) span.setAttribute('do.name', id.name)
try {
await alarmFn()
span.end()
} catch (error) {
span.recordException(error as Exception)
span.setStatus({ code: SpanStatusCode.ERROR })
span.end()
throw error
}
})
return promise
}
function instrumentFetchFn(fetchFn: FetchFn, initialiser: Initialiser, env: Env, id: DurableObjectId): FetchFn {
const fetchHandler: ProxyHandler<FetchFn> = {
async apply(target, thisArg, argArray: Parameters<FetchFn>) {
const request = argArray[0]
const config = initialiser(env, request)
const context = setConfig(config)
try {
const bound = target.bind(unwrap(thisArg))
return await api_context.with(context, executeDOFetch, undefined, bound, request, id)
} catch (error) {
throw error
} finally {
exportSpans()
}
},
}
return wrap(fetchFn, fetchHandler)
}
function instrumentAlarmFn(alarmFn: AlarmFn, initialiser: Initialiser, env: Env, id: DurableObjectId) {
if (!alarmFn) return undefined
const alarmHandler: ProxyHandler<NonNullable<AlarmFn>> = {
async apply(target, thisArg) {
const config = initialiser(env, 'do-alarm')
const context = setConfig(config)
try {
const bound = target.bind(unwrap(thisArg))
return await api_context.with(context, executeDOAlarm, undefined, bound, id)
} catch (error) {
throw error
} finally {
exportSpans()
}
},
}
return wrap(alarmFn, alarmHandler)
}
function instrumentDurableObject(doObj: DurableObject, initialiser: Initialiser, env: Env, state: DurableObjectState) {
const objHandler: ProxyHandler<DurableObject> = {
get(target, prop) {
if (prop === 'fetch') {
const fetchFn = Reflect.get(target, prop)
return instrumentFetchFn(fetchFn, initialiser, env, state.id)
} else if (prop === 'alarm') {
const alarmFn = Reflect.get(target, prop)
return instrumentAlarmFn(alarmFn, initialiser, env, state.id)
} else {
const result = Reflect.get(target, prop)
if (typeof result === 'function') {
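					// note: bind() returns a new bound function which is discarded here; the unbound result is returned below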
result.bind(doObj)
}
return result
}
},
}
return wrap(doObj, objHandler)
}
export function instrumentDOClass(doClass: DOClass, initialiser: Initialiser): DOClass {
const classHandler: ProxyHandler<DOClass> = {
construct(target, [orig_state, orig_env]: ConstructorParameters<DOClass>) {
const trigger: | DOConstructorTrigger = { |
id: orig_state.id.toString(),
name: orig_state.id.name,
}
const constructorConfig = initialiser(orig_env, trigger)
const context = setConfig(constructorConfig)
const state = instrumentState(orig_state)
const env = instrumentEnv(orig_env)
const createDO = () => {
return new target(state, env)
}
const doObj = api_context.with(context, createDO)
return instrumentDurableObject(doObj, initialiser, env, state)
},
}
return wrap(doClass, classHandler)
}
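// Illustrative sketch, not part of the original file: wrapping a Durable Object class so
// its constructor, fetch and alarm handlers are traced. `myDOClass` and `initialiser` are
// assumptions for the example; the public entry point is `instrumentDO` in sdk.ts, which
// simply delegates to instrumentDOClass.
export function exampleInstrumentDO(myDOClass: DOClass, initialiser: Initialiser): DOClass {
	return instrumentDOClass(myDOClass, initialiser)
}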
| src/instrumentation/do.ts | evanderkoogh-otel-cf-workers-18fd741 | [
{
"filename": "src/sdk.ts",
"retrieved_chunk": "}\nexport function instrumentDO(doClass: DOClass, config: ConfigurationOption) {\n\tconst initialiser = createInitialiser(config)\n\treturn instrumentDOClass(doClass, initialiser)\n}\nexport { waitUntilTrace } from './instrumentation/fetch.js'",
"score": 45.05527618244727
},
{
"filename": "src/sdk.ts",
"retrieved_chunk": "import { instrumentGlobalCache } from './instrumentation/cache.js'\nimport { createQueueHandler } from './instrumentation/queue.js'\nimport { DOClass, instrumentDOClass } from './instrumentation/do.js'\ntype FetchHandler = ExportedHandlerFetchHandler<unknown, unknown>\ntype QueueHandler = ExportedHandlerQueueHandler\nexport type ResolveConfigFn = (env: any, trigger: Trigger) => TraceConfig\nexport type ConfigurationOption = TraceConfig | ResolveConfigFn\nexport function isRequest(trigger: Trigger): trigger is Request {\n\treturn trigger instanceof Request\n}",
"score": 26.077526841386156
},
{
"filename": "src/instrumentation/queue.ts",
"retrieved_chunk": "\t\t\tcount.retryRemaining()\n\t\t\tspan.end()\n\t\t\tthrow error\n\t\t}\n\t})\n\treturn promise\n}\nexport function createQueueHandler(queueFn: QueueHandler, initialiser: Initialiser) {\n\tconst queueHandler: ProxyHandler<QueueHandler> = {\n\t\tasync apply(target, _thisArg, argArray: Parameters<QueueHandler>): Promise<void> {",
"score": 15.703425380986843
},
{
"filename": "src/sdk.ts",
"retrieved_chunk": "}\nfunction createInitialiser(config: ConfigurationOption): Initialiser {\n\tif (typeof config === 'function') {\n\t\treturn (env, trigger) => {\n\t\t\tconst conf = parseConfig(config(env, trigger))\n\t\t\tinit(conf)\n\t\t\treturn conf\n\t\t}\n\t} else {\n\t\treturn () => {",
"score": 14.775726582363099
},
{
"filename": "src/instrumentation/queue.ts",
"retrieved_chunk": "\t\t\tconst [batch, orig_env, orig_ctx] = argArray\n\t\t\tconst config = initialiser(orig_env as Record<string, unknown>, batch)\n\t\t\tconst env = instrumentEnv(orig_env as Record<string, unknown>)\n\t\t\tconst { ctx, tracker } = proxyExecutionContext(orig_ctx)\n\t\t\tconst context = setConfig(config)\n\t\t\ttry {\n\t\t\t\tconst args: QueueHandlerArgs = [batch, env, ctx]\n\t\t\t\treturn await api_context.with(context, executeQueueHandler, undefined, target, args)\n\t\t\t} catch (error) {\n\t\t\t\tthrow error",
"score": 14.672886044536732
}
] | typescript | DOConstructorTrigger = { |
import {
trace,
SpanOptions,
SpanKind,
propagation,
context as api_context,
Attributes,
Exception,
Context,
SpanStatusCode,
} from '@opentelemetry/api'
import { SemanticAttributes } from '@opentelemetry/semantic-conventions'
import { Initialiser, getActiveConfig, setConfig } from '../config.js'
import { wrap } from '../wrap.js'
import { instrumentEnv } from './env.js'
import { exportSpans, proxyExecutionContext } from './common.js'
import { ResolvedTraceConfig } from '../types.js'
export type IncludeTraceContextFn = (request: Request) => boolean
export interface FetcherConfig {
includeTraceContext?: boolean | IncludeTraceContextFn
}
export type AcceptTraceContextFn = (request: Request) => boolean
export interface FetchHandlerConfig {
/**
* Whether to enable context propagation for incoming requests to `fetch`.
* This enables or disables distributed tracing from W3C Trace Context headers.
* @default true
*/
acceptTraceContext?: boolean | AcceptTraceContextFn
}
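// Illustrative sketch, not part of the original file: only accept incoming W3C trace
// context from an assumed internal gateway host; all other requests start a fresh trace.
export const exampleFetchHandlerConfig: FetchHandlerConfig = {
	acceptTraceContext: (request) => new URL(request.url).host === 'gateway.internal.example', // assumed host
}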
type FetchHandler = ExportedHandlerFetchHandler
type FetchHandlerArgs = Parameters<FetchHandler>
export function sanitiseURL(url: string): string {
const u = new URL(url)
return `${u.protocol}//${u.host}${u.pathname}${u.search}`
}
const gatherOutgoingCfAttributes = (cf: RequestInitCfProperties): Attributes => {
const attrs: Record<string, string | number> = {}
Object.keys(cf).forEach((key) => {
const value = cf[key]
if (typeof value === 'string' || typeof value === 'number') {
attrs[`cf.${key}`] = value
} else {
attrs[`cf.${key}`] = JSON.stringify(value)
}
})
return attrs
}
export function gatherRequestAttributes(request: Request): Attributes {
const attrs: Record<string, string | number> = {}
const headers = request.headers
// attrs[SemanticAttributes.HTTP_CLIENT_IP] = '1.1.1.1'
attrs[SemanticAttributes.HTTP_METHOD] = request.method
attrs[SemanticAttributes.HTTP_URL] = sanitiseURL(request.url)
attrs[SemanticAttributes.HTTP_USER_AGENT] = headers.get('user-agent')!
attrs[SemanticAttributes.HTTP_REQUEST_CONTENT_LENGTH] = headers.get('content-length')!
attrs['http.request_content-type'] = headers.get('content-type')!
attrs['http.accepts'] = headers.get('accepts')!
return attrs
}
export function gatherResponseAttributes(response: Response): Attributes {
const attrs: Record<string, string | number> = {}
attrs[SemanticAttributes.HTTP_STATUS_CODE] = response.status
attrs[SemanticAttributes.HTTP_RESPONSE_CONTENT_LENGTH] = response.headers.get('content-length')!
attrs['http.response_content-type'] = response.headers.get('content-type')!
return attrs
}
export function gatherIncomingCfAttributes(request: Request): Attributes {
const attrs: Record<string, string | number> = {}
attrs[SemanticAttributes.HTTP_SCHEME] = request.cf?.httpProtocol as string
attrs['net.colo'] = request.cf?.colo as string
attrs['net.country'] = request.cf?.country as string
attrs['net.request_priority'] = request.cf?.requestPriority as string
attrs['net.tls_cipher'] = request.cf?.tlsCipher as string
attrs['net.tls_version'] = request.cf?.tlsVersion as string
attrs['net.asn'] = request.cf?.asn as number
attrs['net.tcp_rtt'] = request.cf?.clientTcpRtt as number
return attrs
}
export function getParentContextFromHeaders(headers: Headers): Context {
return propagation.extract(api_context.active(), headers, {
get(headers, key) {
return headers.get(key) || undefined
},
keys(headers) {
return [...headers.keys()]
},
})
}
function getParentContextFromRequest(request: Request) {
const workerConfig = getActiveConfig()
const acceptTraceContext =
typeof workerConfig.handlers.fetch.acceptTraceContext === 'function'
? workerConfig.handlers.fetch.acceptTraceContext(request)
: workerConfig.handlers.fetch.acceptTraceContext ?? true
return acceptTraceContext ? getParentContextFromHeaders(request.headers) : api_context.active()
}
export function waitUntilTrace(fn: () => Promise<any>): Promise<void> {
const tracer = trace.getTracer('waitUntil')
return tracer.startActiveSpan('waitUntil', async (span) => {
await fn()
span.end()
})
}
let cold_start = true
export function executeFetchHandler(fetchFn: FetchHandler, [request, env, ctx]: FetchHandlerArgs): Promise<Response> {
const spanContext = getParentContextFromRequest(request)
const tracer = trace.getTracer('fetchHandler')
const attributes = {
[SemanticAttributes.FAAS_TRIGGER]: 'http',
[SemanticAttributes.FAAS_COLDSTART]: cold_start,
[SemanticAttributes.FAAS_EXECUTION]: request.headers.get('cf-ray') ?? undefined,
}
cold_start = false
Object.assign(attributes, gatherRequestAttributes(request))
Object.assign(attributes, gatherIncomingCfAttributes(request))
const options: SpanOptions = {
attributes,
kind: SpanKind.SERVER,
}
const promise = tracer.startActiveSpan('fetchHandler', options, spanContext, async (span) => {
try {
const response: Response = await fetchFn(request, env, ctx)
if (response.status < 500) {
span.setStatus({ code: SpanStatusCode.OK })
}
span.setAttributes(gatherResponseAttributes(response))
span.end()
return response
} catch (error) {
span.recordException(error as Exception)
span.setStatus({ code: SpanStatusCode.ERROR })
span.end()
throw error
}
})
return promise
}
| export function createFetchHandler(fetchFn: FetchHandler, initialiser: Initialiser) { |
const fetchHandler: ProxyHandler<FetchHandler> = {
apply: async (target, _thisArg, argArray: Parameters<FetchHandler>): Promise<Response> => {
const [request, orig_env, orig_ctx] = argArray
const config = initialiser(orig_env as Record<string, unknown>, request)
const env = instrumentEnv(orig_env as Record<string, unknown>)
const { ctx, tracker } = proxyExecutionContext(orig_ctx)
const context = setConfig(config)
try {
const args: FetchHandlerArgs = [request, env, ctx]
return await api_context.with(context, executeFetchHandler, undefined, target, args)
} catch (error) {
throw error
} finally {
orig_ctx.waitUntil(exportSpans(tracker))
}
},
}
return wrap(fetchFn, fetchHandler)
}
type getFetchConfig = (config: ResolvedTraceConfig) => FetcherConfig
export function instrumentFetcher(
fetchFn: Fetcher['fetch'],
configFn: getFetchConfig,
attrs?: Attributes
): Fetcher['fetch'] {
const handler: ProxyHandler<typeof fetch> = {
apply: (target, thisArg, argArray): ReturnType<typeof fetch> => {
const workerConfig = getActiveConfig()
const config = configFn(workerConfig)
const request = new Request(argArray[0], argArray[1])
const tracer = trace.getTracer('fetcher')
const options: SpanOptions = { kind: SpanKind.CLIENT, attributes: attrs }
const host = new URL(request.url).host
const spanName = typeof attrs?.['name'] === 'string' ? attrs?.['name'] : `fetch: ${host}`
const promise = tracer.startActiveSpan(spanName, options, async (span) => {
const includeTraceContext =
typeof config.includeTraceContext === 'function'
? config.includeTraceContext(request)
: config.includeTraceContext
if (includeTraceContext ?? true) {
propagation.inject(api_context.active(), request.headers, {
set: (h, k, v) => h.set(k, typeof v === 'string' ? v : String(v)),
})
}
span.setAttributes(gatherRequestAttributes(request))
if (request.cf) span.setAttributes(gatherOutgoingCfAttributes(request.cf))
const response: Response = await Reflect.apply(target, thisArg, [request])
span.setAttributes(gatherResponseAttributes(response))
span.end()
return response
})
return promise
},
}
return wrap(fetchFn, handler, true)
}
export function instrumentGlobalFetch(): void {
globalThis.fetch = instrumentFetcher(globalThis.fetch, (config) => config.fetch)
}
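// Illustrative sketch, not part of the original file: instrumenting a specific Fetcher
// (such as a service binding) while only propagating trace context to assumed internal
// hosts. The `.internal` suffix check is an assumption for the example.
export function exampleInstrumentBinding(binding: Fetcher): Fetcher['fetch'] {
	return instrumentFetcher(binding.fetch.bind(binding), () => ({
		includeTraceContext: (request) => new URL(request.url).host.endsWith('.internal'),
	}))
}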
| src/instrumentation/fetch.ts | evanderkoogh-otel-cf-workers-18fd741 | [
{
"filename": "src/instrumentation/do.ts",
"retrieved_chunk": "\t\t} catch (error) {\n\t\t\tspan.recordException(error as Exception)\n\t\t\tspan.setStatus({ code: SpanStatusCode.ERROR })\n\t\t\tspan.end()\n\t\t\tthrow error\n\t\t}\n\t})\n\treturn promise\n}\nfunction instrumentFetchFn(fetchFn: FetchFn, initialiser: Initialiser, env: Env, id: DurableObjectId): FetchFn {",
"score": 69.13946895398149
},
{
"filename": "src/instrumentation/do.ts",
"retrieved_chunk": "\t\t} catch (error) {\n\t\t\tspan.recordException(error as Exception)\n\t\t\tspan.setStatus({ code: SpanStatusCode.ERROR })\n\t\t\tspan.end()\n\t\t\tthrow error\n\t\t}\n\t})\n\treturn promise\n}\nexport function executeDOAlarm(alarmFn: NonNullable<AlarmFn>, id: DurableObjectId): Promise<void> {",
"score": 61.82921893848121
},
{
"filename": "src/instrumentation/queue.ts",
"retrieved_chunk": "\t\ttry {\n\t\t\tconst result = queueFn(batch, env, ctx)\n\t\t\tawait span.setAttribute('queue.implicitly_acked', count.total - count.succeeded - count.failed)\n\t\t\tcount.ackRemaining()\n\t\t\tspan.setAttributes(count.toAttributes())\n\t\t\tspan.end()\n\t\t\treturn result\n\t\t} catch (error) {\n\t\t\tspan.recordException(error as Exception)\n\t\t\tspan.setAttribute('queue.implicitly_retried', count.total - count.succeeded - count.failed)",
"score": 32.7867927217466
},
{
"filename": "src/instrumentation/queue.ts",
"retrieved_chunk": "\t\t\tcount.retryRemaining()\n\t\t\tspan.end()\n\t\t\tthrow error\n\t\t}\n\t})\n\treturn promise\n}\nexport function createQueueHandler(queueFn: QueueHandler, initialiser: Initialiser) {\n\tconst queueHandler: ProxyHandler<QueueHandler> = {\n\t\tasync apply(target, _thisArg, argArray: Parameters<QueueHandler>): Promise<void> {",
"score": 32.164999813242595
},
{
"filename": "src/instrumentation/do.ts",
"retrieved_chunk": "\tconst name = id.name || ''\n\tconst promise = tracer.startActiveSpan(`do.fetchHandler:${name}`, options, spanContext, async (span) => {\n\t\ttry {\n\t\t\tconst response: Response = await fetchFn(request)\n\t\t\tif (response.ok) {\n\t\t\t\tspan.setStatus({ code: SpanStatusCode.OK })\n\t\t\t}\n\t\t\tspan.setAttributes(gatherResponseAttributes(response))\n\t\t\tspan.end()\n\t\t\treturn response",
"score": 27.692167093810014
}
] | typescript | export function createFetchHandler(fetchFn: FetchHandler, initialiser: Initialiser) { |
import { context as api_context, trace, SpanOptions, SpanKind, Exception, SpanStatusCode } from '@opentelemetry/api'
import { SemanticAttributes } from '@opentelemetry/semantic-conventions'
import { passthroughGet, unwrap, wrap } from '../wrap.js'
import {
getParentContextFromHeaders,
gatherIncomingCfAttributes,
gatherRequestAttributes,
gatherResponseAttributes,
instrumentFetcher,
} from './fetch.js'
import { instrumentEnv } from './env.js'
import { Initialiser, setConfig } from '../config.js'
import { exportSpans } from './common.js'
import { instrumentStorage } from './do-storage.js'
import { DOConstructorTrigger } from '../types.js'
type FetchFn = DurableObject['fetch']
type AlarmFn = DurableObject['alarm']
type Env = Record<string, unknown>
function instrumentBindingStub(stub: DurableObjectStub, nsName: string): DurableObjectStub {
const stubHandler: ProxyHandler<typeof stub> = {
get(target, prop) {
if (prop === 'fetch') {
const fetcher = Reflect.get(target, prop)
const attrs = {
name: `durable_object:${nsName}`,
'do.namespace': nsName,
'do.id': target.id.toString(),
'do.id.name': target.id.name,
}
return instrumentFetcher(fetcher, () => ({ includeTraceContext: true }), attrs)
} else {
return passthroughGet(target, prop)
}
},
}
return wrap(stub, stubHandler)
}
function instrumentBindingGet(getFn: DurableObjectNamespace['get'], nsName: string): DurableObjectNamespace['get'] {
const getHandler: ProxyHandler<DurableObjectNamespace['get']> = {
apply(target, thisArg, argArray) {
const stub: DurableObjectStub = Reflect.apply(target, thisArg, argArray)
return instrumentBindingStub(stub, nsName)
},
}
return wrap(getFn, getHandler)
}
export function instrumentDOBinding(ns: DurableObjectNamespace, nsName: string) {
const nsHandler: ProxyHandler<typeof ns> = {
get(target, prop) {
if (prop === 'get') {
const fn = Reflect.get(ns, prop)
return instrumentBindingGet(fn, nsName)
} else {
return passthroughGet(target, prop)
}
},
}
return wrap(ns, nsHandler)
}
export function instrumentState(state: DurableObjectState) {
const stateHandler: ProxyHandler<DurableObjectState> = {
get(target, prop, receiver) {
const result = Reflect.get(target, prop, unwrap(receiver))
if (prop === 'storage') {
return instrumentStorage(result)
} else if (typeof result === 'function') {
return result.bind(target)
} else {
return result
}
},
}
return wrap(state, stateHandler)
}
let cold_start = true
export type DOClass = { new (state: DurableObjectState, env: any): DurableObject }
export function executeDOFetch(fetchFn: FetchFn, request: Request, id: DurableObjectId): Promise<Response> {
const spanContext = getParentContextFromHeaders(request.headers)
const tracer = trace.getTracer('DO fetchHandler')
const attributes = {
[SemanticAttributes.FAAS_TRIGGER]: 'http',
[SemanticAttributes.FAAS_COLDSTART]: cold_start,
}
cold_start = false
Object.assign(attributes, gatherRequestAttributes(request))
Object.assign(attributes, gatherIncomingCfAttributes(request))
const options: SpanOptions = {
attributes,
kind: SpanKind.SERVER,
}
const name = id.name || ''
const promise = tracer.startActiveSpan(`do.fetchHandler:${name}`, options, spanContext, async (span) => {
try {
const response: Response = await fetchFn(request)
if (response.ok) {
span.setStatus({ code: SpanStatusCode.OK })
}
span.setAttributes(gatherResponseAttributes(response))
span.end()
return response
} catch (error) {
span.recordException(error as Exception)
span.setStatus({ code: SpanStatusCode.ERROR })
span.end()
throw error
}
})
return promise
}
export function executeDOAlarm(alarmFn: NonNullable<AlarmFn>, id: DurableObjectId): Promise<void> {
const tracer = trace.getTracer('DO alarmHandler')
const name = id.name || ''
const promise = tracer.startActiveSpan(`do.alarmHandler:${name}`, async (span) => {
span.setAttribute(SemanticAttributes.FAAS_COLDSTART, cold_start)
cold_start = false
span.setAttribute('do.id', id.toString())
if (id.name) span.setAttribute('do.name', id.name)
try {
await alarmFn()
span.end()
} catch (error) {
span.recordException(error as Exception)
span.setStatus({ code: SpanStatusCode.ERROR })
span.end()
throw error
}
})
return promise
}
function instrumentFetchFn(fetchFn: FetchFn, initialiser: Initialiser, env: Env, id: DurableObjectId): FetchFn {
const fetchHandler: ProxyHandler<FetchFn> = {
async apply(target, thisArg, argArray: Parameters<FetchFn>) {
const request = argArray[0]
const config = initialiser(env, request)
const context = setConfig(config)
try {
const bound = target.bind(unwrap(thisArg))
return await api_context.with(context, executeDOFetch, undefined, bound, request, id)
} catch (error) {
throw error
} finally {
exportSpans()
}
},
}
return wrap(fetchFn, fetchHandler)
}
function instrumentAlarmFn(alarmFn: AlarmFn, initialiser: Initialiser, env: Env, id: DurableObjectId) {
if (!alarmFn) return undefined
const alarmHandler: ProxyHandler<NonNullable<AlarmFn>> = {
async apply(target, thisArg) {
const config = initialiser(env, 'do-alarm')
const context = setConfig(config)
try {
const bound = target.bind(unwrap(thisArg))
return await api_context.with(context, executeDOAlarm, undefined, bound, id)
} catch (error) {
throw error
} finally {
exportSpans()
}
},
}
return wrap(alarmFn, alarmHandler)
}
function instrumentDurableObject(doObj: DurableObject, initialiser: Initialiser, env: Env, state: DurableObjectState) {
const objHandler: ProxyHandler<DurableObject> = {
get(target, prop) {
if (prop === 'fetch') {
const fetchFn = Reflect.get(target, prop)
return instrumentFetchFn(fetchFn, initialiser, env, state.id)
} else if (prop === 'alarm') {
const alarmFn = Reflect.get(target, prop)
return instrumentAlarmFn(alarmFn, initialiser, env, state.id)
} else {
const result = Reflect.get(target, prop)
if (typeof result === 'function') {
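					// note: bind() returns a new bound function which is discarded here; the unbound result is returned below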
result.bind(doObj)
}
return result
}
},
}
return wrap(doObj, objHandler)
}
export function instrumentDOClass(doClass: | DOClass, initialiser: Initialiser): DOClass { |
const classHandler: ProxyHandler<DOClass> = {
construct(target, [orig_state, orig_env]: ConstructorParameters<DOClass>) {
const trigger: DOConstructorTrigger = {
id: orig_state.id.toString(),
name: orig_state.id.name,
}
const constructorConfig = initialiser(orig_env, trigger)
const context = setConfig(constructorConfig)
const state = instrumentState(orig_state)
const env = instrumentEnv(orig_env)
const createDO = () => {
return new target(state, env)
}
const doObj = api_context.with(context, createDO)
return instrumentDurableObject(doObj, initialiser, env, state)
},
}
return wrap(doClass, classHandler)
}
| src/instrumentation/do.ts | evanderkoogh-otel-cf-workers-18fd741 | [
{
"filename": "src/sdk.ts",
"retrieved_chunk": "}\nexport function instrumentDO(doClass: DOClass, config: ConfigurationOption) {\n\tconst initialiser = createInitialiser(config)\n\treturn instrumentDOClass(doClass, initialiser)\n}\nexport { waitUntilTrace } from './instrumentation/fetch.js'",
"score": 33.49025067638842
},
{
"filename": "src/instrumentation/kv.ts",
"retrieved_chunk": "\t\t\t\tspan.setAttribute('hasResult', !!result)\n\t\t\t\tspan.end()\n\t\t\t\treturn result\n\t\t\t})\n\t\t},\n\t}\n\treturn wrap(fn, fnHandler)\n}\nexport function instrumentKV(kv: KVNamespace, name: string): KVNamespace {\n\tconst kvHandler: ProxyHandler<KVNamespace> = {",
"score": 14.168588791409677
},
{
"filename": "src/sdk.ts",
"retrieved_chunk": "import { instrumentGlobalCache } from './instrumentation/cache.js'\nimport { createQueueHandler } from './instrumentation/queue.js'\nimport { DOClass, instrumentDOClass } from './instrumentation/do.js'\ntype FetchHandler = ExportedHandlerFetchHandler<unknown, unknown>\ntype QueueHandler = ExportedHandlerQueueHandler\nexport type ResolveConfigFn = (env: any, trigger: Trigger) => TraceConfig\nexport type ConfigurationOption = TraceConfig | ResolveConfigFn\nexport function isRequest(trigger: Trigger): trigger is Request {\n\treturn trigger instanceof Request\n}",
"score": 13.948967836091747
},
{
"filename": "src/instrumentation/kv.ts",
"retrieved_chunk": "\t\treturn attrs\n\t},\n\tgetWithMetadata(argArray, result) {\n\t\tconst attrs = this.get(argArray, result)\n\t\tattrs['withMetadata'] = true\n\t\treturn attrs\n\t},\n\tlist(argArray, result) {\n\t\tconst attrs: Attributes = {}\n\t\tconst opts: KVNamespaceListOptions = argArray[0] || {}",
"score": 11.207858711441151
},
{
"filename": "src/instrumentation/fetch.ts",
"retrieved_chunk": "\t\t\tspan.recordException(error as Exception)\n\t\t\tspan.setStatus({ code: SpanStatusCode.ERROR })\n\t\t\tspan.end()\n\t\t\tthrow error\n\t\t}\n\t})\n\treturn promise\n}\nexport function createFetchHandler(fetchFn: FetchHandler, initialiser: Initialiser) {\n\tconst fetchHandler: ProxyHandler<FetchHandler> = {",
"score": 11.159601260661065
}
] | typescript | DOClass, initialiser: Initialiser): DOClass { |
import { context as api_context, trace, SpanOptions, SpanKind, Exception, SpanStatusCode } from '@opentelemetry/api'
import { SemanticAttributes } from '@opentelemetry/semantic-conventions'
import { passthroughGet, unwrap, wrap } from '../wrap.js'
import {
getParentContextFromHeaders,
gatherIncomingCfAttributes,
gatherRequestAttributes,
gatherResponseAttributes,
instrumentFetcher,
} from './fetch.js'
import { instrumentEnv } from './env.js'
import { Initialiser, setConfig } from '../config.js'
import { exportSpans } from './common.js'
import { instrumentStorage } from './do-storage.js'
import { DOConstructorTrigger } from '../types.js'
type FetchFn = DurableObject['fetch']
type AlarmFn = DurableObject['alarm']
type Env = Record<string, unknown>
function instrumentBindingStub(stub: DurableObjectStub, nsName: string): DurableObjectStub {
const stubHandler: ProxyHandler<typeof stub> = {
get(target, prop) {
if (prop === 'fetch') {
const fetcher = Reflect.get(target, prop)
const attrs = {
name: `durable_object:${nsName}`,
'do.namespace': nsName,
'do.id': target.id.toString(),
'do.id.name': target.id.name,
}
return instrumentFetcher(fetcher, () => ({ includeTraceContext: true }), attrs)
} else {
return passthroughGet(target, prop)
}
},
}
return wrap(stub, stubHandler)
}
function instrumentBindingGet(getFn: DurableObjectNamespace['get'], nsName: string): DurableObjectNamespace['get'] {
const getHandler: ProxyHandler<DurableObjectNamespace['get']> = {
apply(target, thisArg, argArray) {
const stub: DurableObjectStub = Reflect.apply(target, thisArg, argArray)
return instrumentBindingStub(stub, nsName)
},
}
return wrap(getFn, getHandler)
}
export function instrumentDOBinding(ns: DurableObjectNamespace, nsName: string) {
const nsHandler: ProxyHandler<typeof ns> = {
get(target, prop) {
if (prop === 'get') {
const fn = Reflect.get(ns, prop)
return instrumentBindingGet(fn, nsName)
} else {
return passthroughGet(target, prop)
}
},
}
return wrap(ns, nsHandler)
}
export function instrumentState(state: DurableObjectState) {
const stateHandler: ProxyHandler<DurableObjectState> = {
get(target, prop, receiver) {
const result = Reflect.get(target, prop, unwrap(receiver))
if (prop === 'storage') {
return instrumentStorage(result)
} else if (typeof result === 'function') {
return result.bind(target)
} else {
return result
}
},
}
return wrap(state, stateHandler)
}
let cold_start = true
export type DOClass = { new (state: DurableObjectState, env: any): DurableObject }
export function executeDOFetch(fetchFn: FetchFn, request: Request, id: DurableObjectId): Promise<Response> {
const spanContext = getParentContextFromHeaders(request.headers)
const tracer = trace.getTracer('DO fetchHandler')
const attributes = {
[SemanticAttributes.FAAS_TRIGGER]: 'http',
[SemanticAttributes.FAAS_COLDSTART]: cold_start,
}
cold_start = false
Object.assign(attributes, gatherRequestAttributes(request))
Object.assign(attributes, gatherIncomingCfAttributes(request))
const options: SpanOptions = {
attributes,
kind: SpanKind.SERVER,
}
const name = id.name || ''
const promise = tracer.startActiveSpan(`do.fetchHandler:${name}`, options, spanContext, async (span) => {
try {
const response: Response = await fetchFn(request)
if (response.ok) {
span.setStatus({ code: SpanStatusCode.OK })
}
span.setAttributes(gatherResponseAttributes(response))
span.end()
return response
} catch (error) {
span.recordException(error as Exception)
span.setStatus({ code: SpanStatusCode.ERROR })
span.end()
throw error
}
})
return promise
}
export function executeDOAlarm(alarmFn: NonNullable<AlarmFn>, id: DurableObjectId): Promise<void> {
const tracer = trace.getTracer('DO alarmHandler')
const name = id.name || ''
const promise = tracer.startActiveSpan(`do.alarmHandler:${name}`, async (span) => {
span.setAttribute(SemanticAttributes.FAAS_COLDSTART, cold_start)
cold_start = false
span.setAttribute('do.id', id.toString())
if (id.name) span.setAttribute('do.name', id.name)
try {
await alarmFn()
span.end()
} catch (error) {
span.recordException(error as Exception)
span.setStatus({ code: SpanStatusCode.ERROR })
span.end()
throw error
}
})
return promise
}
function instrumentFetchFn( | fetchFn: FetchFn, initialiser: Initialiser, env: Env, id: DurableObjectId): FetchFn { |
const fetchHandler: ProxyHandler<FetchFn> = {
async apply(target, thisArg, argArray: Parameters<FetchFn>) {
const request = argArray[0]
const config = initialiser(env, request)
const context = setConfig(config)
try {
const bound = target.bind(unwrap(thisArg))
return await api_context.with(context, executeDOFetch, undefined, bound, request, id)
} catch (error) {
throw error
} finally {
exportSpans()
}
},
}
return wrap(fetchFn, fetchHandler)
}
function instrumentAlarmFn(alarmFn: AlarmFn, initialiser: Initialiser, env: Env, id: DurableObjectId) {
if (!alarmFn) return undefined
const alarmHandler: ProxyHandler<NonNullable<AlarmFn>> = {
async apply(target, thisArg) {
const config = initialiser(env, 'do-alarm')
const context = setConfig(config)
try {
const bound = target.bind(unwrap(thisArg))
return await api_context.with(context, executeDOAlarm, undefined, bound, id)
} catch (error) {
throw error
} finally {
exportSpans()
}
},
}
return wrap(alarmFn, alarmHandler)
}
function instrumentDurableObject(doObj: DurableObject, initialiser: Initialiser, env: Env, state: DurableObjectState) {
const objHandler: ProxyHandler<DurableObject> = {
get(target, prop) {
if (prop === 'fetch') {
const fetchFn = Reflect.get(target, prop)
return instrumentFetchFn(fetchFn, initialiser, env, state.id)
} else if (prop === 'alarm') {
const alarmFn = Reflect.get(target, prop)
return instrumentAlarmFn(alarmFn, initialiser, env, state.id)
} else {
const result = Reflect.get(target, prop)
if (typeof result === 'function') {
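					// note: bind() returns a new bound function which is discarded here; the unbound result is returned below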
result.bind(doObj)
}
return result
}
},
}
return wrap(doObj, objHandler)
}
export function instrumentDOClass(doClass: DOClass, initialiser: Initialiser): DOClass {
const classHandler: ProxyHandler<DOClass> = {
construct(target, [orig_state, orig_env]: ConstructorParameters<DOClass>) {
const trigger: DOConstructorTrigger = {
id: orig_state.id.toString(),
name: orig_state.id.name,
}
const constructorConfig = initialiser(orig_env, trigger)
const context = setConfig(constructorConfig)
const state = instrumentState(orig_state)
const env = instrumentEnv(orig_env)
const createDO = () => {
return new target(state, env)
}
const doObj = api_context.with(context, createDO)
return instrumentDurableObject(doObj, initialiser, env, state)
},
}
return wrap(doClass, classHandler)
}
| src/instrumentation/do.ts | evanderkoogh-otel-cf-workers-18fd741 | [
{
"filename": "src/instrumentation/fetch.ts",
"retrieved_chunk": "\t\t\tspan.recordException(error as Exception)\n\t\t\tspan.setStatus({ code: SpanStatusCode.ERROR })\n\t\t\tspan.end()\n\t\t\tthrow error\n\t\t}\n\t})\n\treturn promise\n}\nexport function createFetchHandler(fetchFn: FetchHandler, initialiser: Initialiser) {\n\tconst fetchHandler: ProxyHandler<FetchHandler> = {",
"score": 64.59558623886228
},
{
"filename": "src/instrumentation/fetch.ts",
"retrieved_chunk": "\tconst promise = tracer.startActiveSpan('fetchHandler', options, spanContext, async (span) => {\n\t\ttry {\n\t\t\tconst response: Response = await fetchFn(request, env, ctx)\n\t\t\tif (response.status < 500) {\n\t\t\t\tspan.setStatus({ code: SpanStatusCode.OK })\n\t\t\t}\n\t\t\tspan.setAttributes(gatherResponseAttributes(response))\n\t\t\tspan.end()\n\t\t\treturn response\n\t\t} catch (error) {",
"score": 35.57791346580148
},
{
"filename": "src/instrumentation/queue.ts",
"retrieved_chunk": "\t\t\tcount.retryRemaining()\n\t\t\tspan.end()\n\t\t\tthrow error\n\t\t}\n\t})\n\treturn promise\n}\nexport function createQueueHandler(queueFn: QueueHandler, initialiser: Initialiser) {\n\tconst queueHandler: ProxyHandler<QueueHandler> = {\n\t\tasync apply(target, _thisArg, argArray: Parameters<QueueHandler>): Promise<void> {",
"score": 29.64357883201461
},
{
"filename": "src/instrumentation/queue.ts",
"retrieved_chunk": "\t\ttry {\n\t\t\tconst result = queueFn(batch, env, ctx)\n\t\t\tawait span.setAttribute('queue.implicitly_acked', count.total - count.succeeded - count.failed)\n\t\t\tcount.ackRemaining()\n\t\t\tspan.setAttributes(count.toAttributes())\n\t\t\tspan.end()\n\t\t\treturn result\n\t\t} catch (error) {\n\t\t\tspan.recordException(error as Exception)\n\t\t\tspan.setAttribute('queue.implicitly_retried', count.total - count.succeeded - count.failed)",
"score": 29.009462013630174
},
{
"filename": "src/instrumentation/queue.ts",
"retrieved_chunk": "\t\t\tconst [batch, orig_env, orig_ctx] = argArray\n\t\t\tconst config = initialiser(orig_env as Record<string, unknown>, batch)\n\t\t\tconst env = instrumentEnv(orig_env as Record<string, unknown>)\n\t\t\tconst { ctx, tracker } = proxyExecutionContext(orig_ctx)\n\t\t\tconst context = setConfig(config)\n\t\t\ttry {\n\t\t\t\tconst args: QueueHandlerArgs = [batch, env, ctx]\n\t\t\t\treturn await api_context.with(context, executeQueueHandler, undefined, target, args)\n\t\t\t} catch (error) {\n\t\t\t\tthrow error",
"score": 17.866979656629724
}
] | typescript | fetchFn: FetchFn, initialiser: Initialiser, env: Env, id: DurableObjectId): FetchFn { |
import { context as api_context, trace, SpanOptions, SpanKind, Exception, SpanStatusCode } from '@opentelemetry/api'
import { SemanticAttributes } from '@opentelemetry/semantic-conventions'
import { passthroughGet, unwrap, wrap } from '../wrap.js'
import {
getParentContextFromHeaders,
gatherIncomingCfAttributes,
gatherRequestAttributes,
gatherResponseAttributes,
instrumentFetcher,
} from './fetch.js'
import { instrumentEnv } from './env.js'
import { Initialiser, setConfig } from '../config.js'
import { exportSpans } from './common.js'
import { instrumentStorage } from './do-storage.js'
import { DOConstructorTrigger } from '../types.js'
type FetchFn = DurableObject['fetch']
type AlarmFn = DurableObject['alarm']
type Env = Record<string, unknown>
function instrumentBindingStub(stub: DurableObjectStub, nsName: string): DurableObjectStub {
const stubHandler: ProxyHandler<typeof stub> = {
get(target, prop) {
if (prop === 'fetch') {
const fetcher = Reflect.get(target, prop)
const attrs = {
name: `durable_object:${nsName}`,
'do.namespace': nsName,
'do.id': target.id.toString(),
'do.id.name': target.id.name,
}
return instrumentFetcher(fetcher, () => ({ includeTraceContext: true }), attrs)
} else {
return passthroughGet(target, prop)
}
},
}
return wrap(stub, stubHandler)
}
function instrumentBindingGet(getFn: DurableObjectNamespace['get'], nsName: string): DurableObjectNamespace['get'] {
const getHandler: ProxyHandler<DurableObjectNamespace['get']> = {
apply(target, thisArg, argArray) {
const stub: DurableObjectStub = Reflect.apply(target, thisArg, argArray)
return instrumentBindingStub(stub, nsName)
},
}
return wrap(getFn, getHandler)
}
export function instrumentDOBinding(ns: DurableObjectNamespace, nsName: string) {
const nsHandler: ProxyHandler<typeof ns> = {
get(target, prop) {
if (prop === 'get') {
const fn = Reflect.get(ns, prop)
return instrumentBindingGet(fn, nsName)
} else {
return passthroughGet(target, prop)
}
},
}
return wrap(ns, nsHandler)
}
export function instrumentState(state: DurableObjectState) {
const stateHandler: ProxyHandler<DurableObjectState> = {
get(target, prop, receiver) {
const result = Reflect.get(target, prop, unwrap(receiver))
if (prop === 'storage') {
return | instrumentStorage(result)
} else if (typeof result === 'function') { |
return result.bind(target)
} else {
return result
}
},
}
return wrap(state, stateHandler)
}
let cold_start = true
export type DOClass = { new (state: DurableObjectState, env: any): DurableObject }
export function executeDOFetch(fetchFn: FetchFn, request: Request, id: DurableObjectId): Promise<Response> {
const spanContext = getParentContextFromHeaders(request.headers)
const tracer = trace.getTracer('DO fetchHandler')
const attributes = {
[SemanticAttributes.FAAS_TRIGGER]: 'http',
[SemanticAttributes.FAAS_COLDSTART]: cold_start,
}
cold_start = false
Object.assign(attributes, gatherRequestAttributes(request))
Object.assign(attributes, gatherIncomingCfAttributes(request))
const options: SpanOptions = {
attributes,
kind: SpanKind.SERVER,
}
const name = id.name || ''
const promise = tracer.startActiveSpan(`do.fetchHandler:${name}`, options, spanContext, async (span) => {
try {
const response: Response = await fetchFn(request)
if (response.ok) {
span.setStatus({ code: SpanStatusCode.OK })
}
span.setAttributes(gatherResponseAttributes(response))
span.end()
return response
} catch (error) {
span.recordException(error as Exception)
span.setStatus({ code: SpanStatusCode.ERROR })
span.end()
throw error
}
})
return promise
}
export function executeDOAlarm(alarmFn: NonNullable<AlarmFn>, id: DurableObjectId): Promise<void> {
const tracer = trace.getTracer('DO alarmHandler')
const name = id.name || ''
const promise = tracer.startActiveSpan(`do.alarmHandler:${name}`, async (span) => {
span.setAttribute(SemanticAttributes.FAAS_COLDSTART, cold_start)
cold_start = false
span.setAttribute('do.id', id.toString())
if (id.name) span.setAttribute('do.name', id.name)
try {
await alarmFn()
span.end()
} catch (error) {
span.recordException(error as Exception)
span.setStatus({ code: SpanStatusCode.ERROR })
span.end()
throw error
}
})
return promise
}
function instrumentFetchFn(fetchFn: FetchFn, initialiser: Initialiser, env: Env, id: DurableObjectId): FetchFn {
const fetchHandler: ProxyHandler<FetchFn> = {
async apply(target, thisArg, argArray: Parameters<FetchFn>) {
const request = argArray[0]
const config = initialiser(env, request)
const context = setConfig(config)
try {
const bound = target.bind(unwrap(thisArg))
return await api_context.with(context, executeDOFetch, undefined, bound, request, id)
} catch (error) {
throw error
} finally {
exportSpans()
}
},
}
return wrap(fetchFn, fetchHandler)
}
function instrumentAlarmFn(alarmFn: AlarmFn, initialiser: Initialiser, env: Env, id: DurableObjectId) {
if (!alarmFn) return undefined
const alarmHandler: ProxyHandler<NonNullable<AlarmFn>> = {
async apply(target, thisArg) {
const config = initialiser(env, 'do-alarm')
const context = setConfig(config)
try {
const bound = target.bind(unwrap(thisArg))
return await api_context.with(context, executeDOAlarm, undefined, bound, id)
} catch (error) {
throw error
} finally {
exportSpans()
}
},
}
return wrap(alarmFn, alarmHandler)
}
function instrumentDurableObject(doObj: DurableObject, initialiser: Initialiser, env: Env, state: DurableObjectState) {
const objHandler: ProxyHandler<DurableObject> = {
get(target, prop) {
if (prop === 'fetch') {
const fetchFn = Reflect.get(target, prop)
return instrumentFetchFn(fetchFn, initialiser, env, state.id)
} else if (prop === 'alarm') {
const alarmFn = Reflect.get(target, prop)
return instrumentAlarmFn(alarmFn, initialiser, env, state.id)
} else {
const result = Reflect.get(target, prop)
if (typeof result === 'function') {
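					// note: bind() returns a new bound function which is discarded here; the unbound result is returned below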
result.bind(doObj)
}
return result
}
},
}
return wrap(doObj, objHandler)
}
export function instrumentDOClass(doClass: DOClass, initialiser: Initialiser): DOClass {
const classHandler: ProxyHandler<DOClass> = {
construct(target, [orig_state, orig_env]: ConstructorParameters<DOClass>) {
const trigger: DOConstructorTrigger = {
id: orig_state.id.toString(),
name: orig_state.id.name,
}
const constructorConfig = initialiser(orig_env, trigger)
const context = setConfig(constructorConfig)
const state = instrumentState(orig_state)
const env = instrumentEnv(orig_env)
const createDO = () => {
return new target(state, env)
}
const doObj = api_context.with(context, createDO)
return instrumentDurableObject(doObj, initialiser, env, state)
},
}
return wrap(doClass, classHandler)
}
| src/instrumentation/do.ts | evanderkoogh-otel-cf-workers-18fd741 | [
{
"filename": "src/instrumentation/do-storage.ts",
"retrieved_chunk": "\treturn wrap(fn, fnHandler)\n}\nexport function instrumentStorage(storage: DurableObjectStorage): DurableObjectStorage {\n\tconst storageHandler: ProxyHandler<DurableObjectStorage> = {\n\t\tget: (target, prop, receiver) => {\n\t\t\tconst operation = String(prop)\n\t\t\tconst fn = Reflect.get(target, prop, receiver)\n\t\t\treturn instrumentStorageFn(fn, operation)\n\t\t},\n\t}",
"score": 50.580437227551606
},
{
"filename": "src/instrumentation/kv.ts",
"retrieved_chunk": "\t\tget: (target, prop, receiver) => {\n\t\t\tconst operation = String(prop)\n\t\t\tconst fn = Reflect.get(target, prop, receiver)\n\t\t\treturn instrumentKVFn(fn, name, operation)\n\t\t},\n\t}\n\treturn wrap(kv, kvHandler)\n}",
"score": 41.24346273656412
},
{
"filename": "src/wrap.ts",
"retrieved_chunk": "\tproxyHandler.get = (target, prop, receiver) => {\n\t\tif (prop === unwrapSymbol) {\n\t\t\treturn item\n\t\t} else {\n\t\t\tif (handler.get) {\n\t\t\t\treturn handler.get(target, prop, receiver)\n\t\t\t} else if (autoPassthrough) {\n\t\t\t\treturn passthroughGet(target, prop)\n\t\t\t}\n\t\t}",
"score": 40.82491861748149
},
{
"filename": "src/instrumentation/cache.ts",
"retrieved_chunk": "\t\t\t\treturn result\n\t\t\t})\n\t\t},\n\t}\n\treturn wrap(fn, handler)\n}\nfunction instrumentCache(cache: Cache, cacheName: string): Cache {\n\tconst handler: ProxyHandler<typeof cache> = {\n\t\tget(target, prop) {\n\t\t\tif (prop === 'delete' || prop === 'match' || prop === 'put') {",
"score": 39.368536489273644
},
{
"filename": "src/instrumentation/cache.ts",
"retrieved_chunk": "\tconst handler: ProxyHandler<typeof caches> = {\n\t\tget(target, prop) {\n\t\t\tif (prop === 'default') {\n\t\t\t\tconst cache = target.default\n\t\t\t\treturn instrumentCache(cache, 'default')\n\t\t\t} else if (prop === 'open') {\n\t\t\t\tconst openFn = Reflect.get(target, prop).bind(target)\n\t\t\t\treturn instrumentOpen(openFn)\n\t\t\t} else {\n\t\t\t\treturn Reflect.get(target, prop)",
"score": 37.45666917139095
}
] | typescript | instrumentStorage(result)
} else if (typeof result === 'function') { |
import { context as api_context, trace, SpanOptions, SpanKind, Exception, SpanStatusCode } from '@opentelemetry/api'
import { SemanticAttributes } from '@opentelemetry/semantic-conventions'
import { passthroughGet, unwrap, wrap } from '../wrap.js'
import {
getParentContextFromHeaders,
gatherIncomingCfAttributes,
gatherRequestAttributes,
gatherResponseAttributes,
instrumentFetcher,
} from './fetch.js'
import { instrumentEnv } from './env.js'
import { Initialiser, setConfig } from '../config.js'
import { exportSpans } from './common.js'
import { instrumentStorage } from './do-storage.js'
import { DOConstructorTrigger } from '../types.js'
type FetchFn = DurableObject['fetch']
type AlarmFn = DurableObject['alarm']
type Env = Record<string, unknown>
function instrumentBindingStub(stub: DurableObjectStub, nsName: string): DurableObjectStub {
const stubHandler: ProxyHandler<typeof stub> = {
get(target, prop) {
if (prop === 'fetch') {
const fetcher = Reflect.get(target, prop)
const attrs = {
name: `durable_object:${nsName}`,
'do.namespace': nsName,
'do.id': target.id.toString(),
'do.id.name': target.id.name,
}
return instrumentFetcher(fetcher, () => ({ includeTraceContext: true }), attrs)
} else {
return passthroughGet(target, prop)
}
},
}
return wrap(stub, stubHandler)
}
function instrumentBindingGet(getFn: DurableObjectNamespace['get'], nsName: string): DurableObjectNamespace['get'] {
const getHandler: ProxyHandler<DurableObjectNamespace['get']> = {
apply(target, thisArg, argArray) {
const stub: DurableObjectStub = Reflect.apply(target, thisArg, argArray)
return instrumentBindingStub(stub, nsName)
},
}
return wrap(getFn, getHandler)
}
export function instrumentDOBinding(ns: DurableObjectNamespace, nsName: string) {
const nsHandler: ProxyHandler<typeof ns> = {
get(target, prop) {
if (prop === 'get') {
const fn = Reflect.get(ns, prop)
return instrumentBindingGet(fn, nsName)
} else {
return passthroughGet(target, prop)
}
},
}
return wrap(ns, nsHandler)
}
export function instrumentState(state: DurableObjectState) {
const stateHandler: ProxyHandler<DurableObjectState> = {
get(target, prop, receiver) {
| const result = Reflect.get(target, prop, unwrap(receiver))
if (prop === 'storage') { |
return instrumentStorage(result)
} else if (typeof result === 'function') {
return result.bind(target)
} else {
return result
}
},
}
return wrap(state, stateHandler)
}
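// True until the first fetch/alarm invocation in this isolate; recorded as the faas.coldstart attribute.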
let cold_start = true
export type DOClass = { new (state: DurableObjectState, env: any): DurableObject }
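// Runs the Durable Object fetch handler inside a SERVER span, linked to any trace
// context found in the incoming request headers.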
export function executeDOFetch(fetchFn: FetchFn, request: Request, id: DurableObjectId): Promise<Response> {
const spanContext = getParentContextFromHeaders(request.headers)
const tracer = trace.getTracer('DO fetchHandler')
const attributes = {
[SemanticAttributes.FAAS_TRIGGER]: 'http',
[SemanticAttributes.FAAS_COLDSTART]: cold_start,
}
cold_start = false
Object.assign(attributes, gatherRequestAttributes(request))
Object.assign(attributes, gatherIncomingCfAttributes(request))
const options: SpanOptions = {
attributes,
kind: SpanKind.SERVER,
}
const name = id.name || ''
const promise = tracer.startActiveSpan(`do.fetchHandler:${name}`, options, spanContext, async (span) => {
try {
const response: Response = await fetchFn(request)
if (response.ok) {
span.setStatus({ code: SpanStatusCode.OK })
}
span.setAttributes(gatherResponseAttributes(response))
span.end()
return response
} catch (error) {
span.recordException(error as Exception)
span.setStatus({ code: SpanStatusCode.ERROR })
span.end()
throw error
}
})
return promise
}
export function executeDOAlarm(alarmFn: NonNullable<AlarmFn>, id: DurableObjectId): Promise<void> {
const tracer = trace.getTracer('DO alarmHandler')
const name = id.name || ''
const promise = tracer.startActiveSpan(`do.alarmHandler:${name}`, async (span) => {
span.setAttribute(SemanticAttributes.FAAS_COLDSTART, cold_start)
cold_start = false
span.setAttribute('do.id', id.toString())
if (id.name) span.setAttribute('do.name', id.name)
try {
await alarmFn()
span.end()
} catch (error) {
span.recordException(error as Exception)
span.setStatus({ code: SpanStatusCode.ERROR })
span.end()
throw error
}
})
return promise
}
function instrumentFetchFn(fetchFn: FetchFn, initialiser: Initialiser, env: Env, id: DurableObjectId): FetchFn {
const fetchHandler: ProxyHandler<FetchFn> = {
async apply(target, thisArg, argArray: Parameters<FetchFn>) {
const request = argArray[0]
const config = initialiser(env, request)
const context = setConfig(config)
try {
const bound = target.bind(unwrap(thisArg))
return await api_context.with(context, executeDOFetch, undefined, bound, request, id)
} catch (error) {
throw error
} finally {
exportSpans()
}
},
}
return wrap(fetchFn, fetchHandler)
}
function instrumentAlarmFn(alarmFn: AlarmFn, initialiser: Initialiser, env: Env, id: DurableObjectId) {
if (!alarmFn) return undefined
const alarmHandler: ProxyHandler<NonNullable<AlarmFn>> = {
async apply(target, thisArg) {
const config = initialiser(env, 'do-alarm')
const context = setConfig(config)
try {
const bound = target.bind(unwrap(thisArg))
return await api_context.with(context, executeDOAlarm, undefined, bound, id)
} catch (error) {
throw error
} finally {
exportSpans()
}
},
}
return wrap(alarmFn, alarmHandler)
}
function instrumentDurableObject(doObj: DurableObject, initialiser: Initialiser, env: Env, state: DurableObjectState) {
const objHandler: ProxyHandler<DurableObject> = {
get(target, prop) {
if (prop === 'fetch') {
const fetchFn = Reflect.get(target, prop)
return instrumentFetchFn(fetchFn, initialiser, env, state.id)
} else if (prop === 'alarm') {
const alarmFn = Reflect.get(target, prop)
return instrumentAlarmFn(alarmFn, initialiser, env, state.id)
} else {
const result = Reflect.get(target, prop)
if (typeof result === 'function') {
					// return the bound function so `this` refers to the original object, not the proxy
					return result.bind(doObj)
}
return result
}
},
}
return wrap(doObj, objHandler)
}
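// Wraps a Durable Object class so that constructing it instruments its state, env,
// fetch handler and alarm handler.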
export function instrumentDOClass(doClass: DOClass, initialiser: Initialiser): DOClass {
const classHandler: ProxyHandler<DOClass> = {
construct(target, [orig_state, orig_env]: ConstructorParameters<DOClass>) {
const trigger: DOConstructorTrigger = {
id: orig_state.id.toString(),
name: orig_state.id.name,
}
const constructorConfig = initialiser(orig_env, trigger)
const context = setConfig(constructorConfig)
const state = instrumentState(orig_state)
const env = instrumentEnv(orig_env)
const createDO = () => {
return new target(state, env)
}
const doObj = api_context.with(context, createDO)
return instrumentDurableObject(doObj, initialiser, env, state)
},
}
return wrap(doClass, classHandler)
}
| src/instrumentation/do.ts | evanderkoogh-otel-cf-workers-18fd741 | [
{
"filename": "src/instrumentation/do-storage.ts",
"retrieved_chunk": "\treturn wrap(fn, fnHandler)\n}\nexport function instrumentStorage(storage: DurableObjectStorage): DurableObjectStorage {\n\tconst storageHandler: ProxyHandler<DurableObjectStorage> = {\n\t\tget: (target, prop, receiver) => {\n\t\t\tconst operation = String(prop)\n\t\t\tconst fn = Reflect.get(target, prop, receiver)\n\t\t\treturn instrumentStorageFn(fn, operation)\n\t\t},\n\t}",
"score": 43.49650483211849
},
{
"filename": "src/instrumentation/kv.ts",
"retrieved_chunk": "\t\tget: (target, prop, receiver) => {\n\t\t\tconst operation = String(prop)\n\t\t\tconst fn = Reflect.get(target, prop, receiver)\n\t\t\treturn instrumentKVFn(fn, name, operation)\n\t\t},\n\t}\n\treturn wrap(kv, kvHandler)\n}",
"score": 39.63746760782965
},
{
"filename": "src/wrap.ts",
"retrieved_chunk": "\tproxyHandler.get = (target, prop, receiver) => {\n\t\tif (prop === unwrapSymbol) {\n\t\t\treturn item\n\t\t} else {\n\t\t\tif (handler.get) {\n\t\t\t\treturn handler.get(target, prop, receiver)\n\t\t\t} else if (autoPassthrough) {\n\t\t\t\treturn passthroughGet(target, prop)\n\t\t\t}\n\t\t}",
"score": 35.2284481738045
},
{
"filename": "src/instrumentation/env.ts",
"retrieved_chunk": "const isDurableObject = (item?: unknown): item is DurableObjectNamespace => {\n\treturn !!(item as DurableObjectNamespace)?.idFromName\n}\nconst instrumentEnv = (env: Record<string, unknown>): Record<string, unknown> => {\n\tconst envHandler: ProxyHandler<Record<string, unknown>> = {\n\t\tget: (target, prop, receiver) => {\n\t\t\tconst item = Reflect.get(target, prop, receiver)\n\t\t\tif (isKVNamespace(item)) {\n\t\t\t\treturn instrumentKV(item, String(prop))\n\t\t\t} else if (isQueue(item)) {",
"score": 33.57697507677733
},
{
"filename": "src/instrumentation/queue.ts",
"retrieved_chunk": "\t\t\t\tconst messagesHandler: ProxyHandler<MessageBatch['messages']> = {\n\t\t\t\t\tget: (target, prop) => {\n\t\t\t\t\t\tif (typeof prop === 'string' && !isNaN(parseInt(prop))) {\n\t\t\t\t\t\t\tconst message = Reflect.get(target, prop)\n\t\t\t\t\t\t\treturn proxyQueueMessage(message, count)\n\t\t\t\t\t\t} else {\n\t\t\t\t\t\t\treturn Reflect.get(target, prop)\n\t\t\t\t\t\t}\n\t\t\t\t\t},\n\t\t\t\t}",
"score": 31.014247978791744
}
] | typescript | const result = Reflect.get(target, prop, unwrap(receiver))
if (prop === 'storage') { |
import { context as api_context, trace, SpanOptions, SpanKind, Exception, SpanStatusCode } from '@opentelemetry/api'
import { SemanticAttributes } from '@opentelemetry/semantic-conventions'
import { passthroughGet, unwrap, wrap } from '../wrap.js'
import {
getParentContextFromHeaders,
gatherIncomingCfAttributes,
gatherRequestAttributes,
gatherResponseAttributes,
instrumentFetcher,
} from './fetch.js'
import { instrumentEnv } from './env.js'
import { Initialiser, setConfig } from '../config.js'
import { exportSpans } from './common.js'
import { instrumentStorage } from './do-storage.js'
import { DOConstructorTrigger } from '../types.js'
type FetchFn = DurableObject['fetch']
type AlarmFn = DurableObject['alarm']
type Env = Record<string, unknown>
function instrumentBindingStub(stub: DurableObjectStub, nsName: string): DurableObjectStub {
const stubHandler: ProxyHandler<typeof stub> = {
get(target, prop) {
if (prop === 'fetch') {
const fetcher = Reflect.get(target, prop)
const attrs = {
name: `durable_object:${nsName}`,
'do.namespace': nsName,
'do.id': target.id.toString(),
'do.id.name': target.id.name,
}
return instrumentFetcher(fetcher, () => ({ includeTraceContext: true }), attrs)
} else {
return passthroughGet(target, prop)
}
},
}
return wrap(stub, stubHandler)
}
function instrumentBindingGet(getFn: DurableObjectNamespace['get'], nsName: string): DurableObjectNamespace['get'] {
const getHandler: ProxyHandler<DurableObjectNamespace['get']> = {
apply(target, thisArg, argArray) {
const stub: DurableObjectStub = Reflect.apply(target, thisArg, argArray)
return instrumentBindingStub(stub, nsName)
},
}
return wrap(getFn, getHandler)
}
export function instrumentDOBinding(ns: DurableObjectNamespace, nsName: string) {
const nsHandler: ProxyHandler<typeof ns> = {
get(target, prop) {
if (prop === 'get') {
const fn = Reflect.get(ns, prop)
return instrumentBindingGet(fn, nsName)
} else {
return passthroughGet(target, prop)
}
},
}
return wrap(ns, nsHandler)
}
export function instrumentState(state: DurableObjectState) {
const stateHandler: ProxyHandler<DurableObjectState> = {
get(target, prop, receiver) {
const result = Reflect.get(target, prop, unwrap(receiver))
if (prop === 'storage') {
return instrumentStorage(result)
} else if (typeof result === 'function') {
return result.bind(target)
} else {
return result
}
},
}
return wrap(state, stateHandler)
}
let cold_start = true
export type DOClass = { new (state: DurableObjectState, env: any): DurableObject }
export function executeDOFetch(fetchFn: FetchFn, request: Request, id: DurableObjectId): Promise<Response> {
const spanContext = getParentContextFromHeaders(request.headers)
const tracer = trace.getTracer('DO fetchHandler')
const attributes = {
[SemanticAttributes.FAAS_TRIGGER]: 'http',
[SemanticAttributes.FAAS_COLDSTART]: cold_start,
}
cold_start = false
Object.assign(attributes, gatherRequestAttributes(request))
Object.assign(attributes, gatherIncomingCfAttributes(request))
const options: SpanOptions = {
attributes,
kind: SpanKind.SERVER,
}
const name = id.name || ''
const promise = tracer.startActiveSpan(`do.fetchHandler:${name}`, options, spanContext, async (span) => {
try {
const response: Response = await fetchFn(request)
if (response.ok) {
span.setStatus({ code: SpanStatusCode.OK })
}
span.setAttributes(gatherResponseAttributes(response))
span.end()
return response
} catch (error) {
span.recordException(error as Exception)
span.setStatus({ code: SpanStatusCode.ERROR })
span.end()
throw error
}
})
return promise
}
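// Runs the Durable Object alarm handler inside its own span, recording cold start and the object id.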
export function executeDOAlarm(alarmFn: NonNullable<AlarmFn>, id: DurableObjectId): Promise<void> {
const tracer = trace.getTracer('DO alarmHandler')
const name = id.name || ''
const promise = tracer.startActiveSpan(`do.alarmHandler:${name}`, async (span) => {
span.setAttribute(SemanticAttributes.FAAS_COLDSTART, cold_start)
cold_start = false
span.setAttribute('do.id', id.toString())
if (id.name) span.setAttribute('do.name', id.name)
try {
await alarmFn()
span.end()
} catch (error) {
span.recordException(error as Exception)
span.setStatus({ code: SpanStatusCode.ERROR })
span.end()
throw error
}
})
return promise
}
function instrumentFetchFn(fetchFn: FetchFn, initialiser: Initialiser, env: Env, id: DurableObjectId): FetchFn {
const fetchHandler: ProxyHandler<FetchFn> = {
async apply(target, thisArg, argArray: Parameters<FetchFn>) {
const request = argArray[0]
const config = initialiser(env, request)
const context = setConfig(config)
try {
const bound = target.bind(unwrap(thisArg))
return await api_context.with(context, executeDOFetch, undefined, bound, request, id)
} catch (error) {
throw error
} finally {
exportSpans()
}
},
}
return wrap(fetchFn, fetchHandler)
}
function instrumentAlarmFn(alarmFn: AlarmFn, initialiser: Initialiser, env: Env, id: DurableObjectId) {
if (!alarmFn) return undefined
const alarmHandler: ProxyHandler<NonNullable<AlarmFn>> = {
async apply(target, thisArg) {
const config = initialiser(env, 'do-alarm')
const context = setConfig(config)
try {
const bound = target.bind(unwrap(thisArg))
return await api_context.with(context, executeDOAlarm, undefined, bound, id)
} catch (error) {
throw error
} finally {
exportSpans()
}
},
}
return wrap(alarmFn, alarmHandler)
}
function instrumentDurableObject(doObj: DurableObject, initialiser: Initialiser, env: Env, state: DurableObjectState) {
const objHandler: ProxyHandler<DurableObject> = {
get(target, prop) {
if (prop === 'fetch') {
const fetchFn = Reflect.get(target, prop)
return instrumentFetchFn(fetchFn, initialiser, env, state.id)
} else if (prop === 'alarm') {
const alarmFn = Reflect.get(target, prop)
return instrumentAlarmFn(alarmFn, initialiser, env, state.id)
} else {
const result = Reflect.get(target, prop)
if (typeof result === 'function') {
					// return the bound function so `this` refers to the original object, not the proxy
					return result.bind(doObj)
}
return result
}
},
}
return wrap(doObj, objHandler)
}
export function instrumentDOClass(doClass: DOClass, initialiser: Initialiser): DOClass {
const classHandler: ProxyHandler<DOClass> = {
construct(target, [orig_state, orig_env]: ConstructorParameters<DOClass>) {
const trigger: DOConstructorTrigger = {
id: orig_state.id.toString(),
name: orig_state.id.name,
}
const constructorConfig = initialiser(orig_env, trigger)
const context = setConfig(constructorConfig)
const state = instrumentState(orig_state)
const | env = instrumentEnv(orig_env)
const createDO = () => { |
return new target(state, env)
}
const doObj = api_context.with(context, createDO)
return instrumentDurableObject(doObj, initialiser, env, state)
},
}
return wrap(doClass, classHandler)
}
| src/instrumentation/do.ts | evanderkoogh-otel-cf-workers-18fd741 | [
{
"filename": "src/instrumentation/queue.ts",
"retrieved_chunk": "\t\t\tconst [batch, orig_env, orig_ctx] = argArray\n\t\t\tconst config = initialiser(orig_env as Record<string, unknown>, batch)\n\t\t\tconst env = instrumentEnv(orig_env as Record<string, unknown>)\n\t\t\tconst { ctx, tracker } = proxyExecutionContext(orig_ctx)\n\t\t\tconst context = setConfig(config)\n\t\t\ttry {\n\t\t\t\tconst args: QueueHandlerArgs = [batch, env, ctx]\n\t\t\t\treturn await api_context.with(context, executeQueueHandler, undefined, target, args)\n\t\t\t} catch (error) {\n\t\t\t\tthrow error",
"score": 37.49685618525967
},
{
"filename": "src/instrumentation/fetch.ts",
"retrieved_chunk": "\t\tapply: async (target, _thisArg, argArray: Parameters<FetchHandler>): Promise<Response> => {\n\t\t\tconst [request, orig_env, orig_ctx] = argArray\n\t\t\tconst config = initialiser(orig_env as Record<string, unknown>, request)\n\t\t\tconst env = instrumentEnv(orig_env as Record<string, unknown>)\n\t\t\tconst { ctx, tracker } = proxyExecutionContext(orig_ctx)\n\t\t\tconst context = setConfig(config)\n\t\t\ttry {\n\t\t\t\tconst args: FetchHandlerArgs = [request, env, ctx]\n\t\t\t\treturn await api_context.with(context, executeFetchHandler, undefined, target, args)\n\t\t\t} catch (error) {",
"score": 35.77118137531892
},
{
"filename": "src/instrumentation/queue.ts",
"retrieved_chunk": "\t\t\t'queue.messages_success': this.succeeded,\n\t\t\t'queue.messages_failed': this.failed,\n\t\t\t'queue.batch_success': this.succeeded === this.total,\n\t\t}\n\t}\n}\nconst addEvent = (name: string, msg?: Message) => {\n\tconst attrs: Attributes = {}\n\tif (msg) {\n\t\tattrs['queue.message_id'] = msg.id",
"score": 25.92944917522607
},
{
"filename": "src/sdk.ts",
"retrieved_chunk": "}\nfunction createInitialiser(config: ConfigurationOption): Initialiser {\n\tif (typeof config === 'function') {\n\t\treturn (env, trigger) => {\n\t\t\tconst conf = parseConfig(config(env, trigger))\n\t\t\tinit(conf)\n\t\t\treturn conf\n\t\t}\n\t} else {\n\t\treturn () => {",
"score": 22.62338406430813
},
{
"filename": "src/sdk.ts",
"retrieved_chunk": "export function isMessageBatch(trigger: Trigger): trigger is MessageBatch {\n\treturn !!(trigger as MessageBatch).ackAll\n}\nexport function isAlarm(trigger: Trigger): trigger is 'do-alarm' {\n\treturn trigger === 'do-alarm'\n}\nconst createResource = (config: ResolvedTraceConfig): Resource => {\n\tconst workerResourceAttrs = {\n\t\t[SemanticResourceAttributes.CLOUD_PROVIDER]: 'cloudflare',\n\t\t[SemanticResourceAttributes.CLOUD_PLATFORM]: 'cloudflare.workers',",
"score": 22.578518344630766
}
] | typescript | env = instrumentEnv(orig_env)
const createDO = () => { |
/* eslint-disable @next/next/no-img-element */
import { DialogueElement } from "@/types/DialogueElement";
import { AvatarIcon } from "../AvatarIcon";
import { EmojiWrap } from "../EmojiWrap";
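// Renders one chat bubble: avatar, the streamed text (with a blinking cursor on the last
// line while responding), an optional trailing text block, and emoji reaction counts.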
export const DialogueElementItem: React.FC<{
dialogueElement: DialogueElement;
dialogueIndex: number;
isResponding: boolean;
}> = ({ dialogueElement, dialogueIndex, isResponding }) => {
return (
<div
className={`dialogueElementItem ${
dialogueElement.who === "assistant"
? "dialogueElementItemAssistant"
: "dialogueElementItemHuman"
}`}
key={dialogueIndex}
>
<div
className="avatarIconWrap"
style={{
display: "flex",
justifyItems: "center",
flexShrink: 0,
}}
>
<AvatarIcon who={dialogueElement.who} />
</div>
<div className="dialogueElementWrap">
<div
className="dialogueTextWrap"
style={{
paddingLeft: "5px",
paddingRight: "5px",
flexGrow: 1,
maxWidth: "100%",
}}
>
{dialogueElement.text?.split("\n").map((row, rowIdx) => {
return (
<div
className="dialogueTextRow"
key={`${dialogueIndex}-${rowIdx}`}
style={{
minHeight: "1em",
maxWidth: "100%",
wordBreak: "break-all",
}}
>
{row}
{isResponding &&
rowIdx === dialogueElement.text.split("\n").length - 1 && (
<span className="blinkingCursor" />
)}
</div>
);
})}
{!isResponding &&
dialogueElement.textEnd?.split("\n").map((row, rowIdx) => {
return (
<div
key={`${dialogueIndex}-${rowIdx}-end`}
style={{
minHeight: "1em",
marginLeft: row.startsWith(" ") ? "1em" : "0px",
}}
>
{row}
</div>
);
})}
</div>
{!isResponding && (
<div
className="dialogueEmojiListWrap"
style={{
position: "relative",
top: 0,
display: "flex",
padding: "6px 0",
height: "auto",
}}
>
{dialogueElement.emojiList.map((emojiValue) => {
return (
< | EmojiWrap
key={emojiValue.name} |
emoji={emojiValue.name}
count={emojiValue.count}
/>
);
})}
</div>
)}
</div>
</div>
);
};
| src/components/DialogueElementItem/index.tsx | yuiseki-LUNATIC-0c1872d | [
{
"filename": "src/components/EmojiWrap/index.tsx",
"retrieved_chunk": " cursor: \"pointer\",\n fontFamily: \"sans-serif, emoji\",\n display: \"flex\",\n padding: \"0 8px\",\n justifyContent: \"center\",\n alignItems: \"center\",\n zIndex: 50,\n }}\n >\n <span",
"score": 15.137641910635736
},
{
"filename": "src/app/page.tsx",
"retrieved_chunk": " <main suppressHydrationWarning className=\"main\">\n <div\n className=\"dialogueListWrap\"\n style={{\n width: \"100%\",\n margin: \"0 auto 5em\",\n }}\n >\n {dialogueList.map((dialogueElement, dialogueIndex) => {\n return (",
"score": 13.291194022129922
},
{
"filename": "src/app/page.tsx",
"retrieved_chunk": " {\n who: \"assistant\",\n text: greeting,\n emojiList: emojiNames.map((emojiName) => {\n return {\n name: emojiName,\n count: Math.floor(Math.random() * (1000 - 0 + 1) + 0),\n };\n }),\n },",
"score": 11.295511866162208
},
{
"filename": "src/components/TextInput/index.tsx",
"retrieved_chunk": " style={{\n display: \"block\",\n padding: \"4px\",\n height: \"34px\",\n width: \"34px\",\n }}\n >\n <img\n style={{ height: \"24px\", width: \"24px\" }}\n src=\"https://i.gyazo.com/1e58e82090fc6f9b140e23fc03faefc7.png\"",
"score": 11.162277623187903
},
{
"filename": "src/app/page.tsx",
"retrieved_chunk": " {\n who: \"assistant\",\n text: surfaceResJson.surface,\n emojiList: Object.keys(emojiDict)\n .sort(() => Math.random() - 0.5)\n .slice(0, 4)\n .map((emojiName) => {\n return {\n name: emojiName,\n count: Math.floor(Math.random() * (1000 - 0 + 1) + 0),",
"score": 9.855349243728455
}
] | typescript | EmojiWrap
key={emojiValue.name} |
import { context as api_context, trace, SpanOptions, SpanKind, Exception, SpanStatusCode } from '@opentelemetry/api'
import { SemanticAttributes } from '@opentelemetry/semantic-conventions'
import { passthroughGet, unwrap, wrap } from '../wrap.js'
import {
getParentContextFromHeaders,
gatherIncomingCfAttributes,
gatherRequestAttributes,
gatherResponseAttributes,
instrumentFetcher,
} from './fetch.js'
import { instrumentEnv } from './env.js'
import { Initialiser, setConfig } from '../config.js'
import { exportSpans } from './common.js'
import { instrumentStorage } from './do-storage.js'
import { DOConstructorTrigger } from '../types.js'
type FetchFn = DurableObject['fetch']
type AlarmFn = DurableObject['alarm']
type Env = Record<string, unknown>
function instrumentBindingStub(stub: DurableObjectStub, nsName: string): DurableObjectStub {
const stubHandler: ProxyHandler<typeof stub> = {
get(target, prop) {
if (prop === 'fetch') {
const fetcher = Reflect.get(target, prop)
const attrs = {
name: `durable_object:${nsName}`,
'do.namespace': nsName,
'do.id': target.id.toString(),
'do.id.name': target.id.name,
}
return instrumentFetcher(fetcher, () => ({ includeTraceContext: true }), attrs)
} else {
return passthroughGet(target, prop)
}
},
}
return wrap(stub, stubHandler)
}
function instrumentBindingGet(getFn: DurableObjectNamespace['get'], nsName: string): DurableObjectNamespace['get'] {
const getHandler: ProxyHandler<DurableObjectNamespace['get']> = {
apply(target, thisArg, argArray) {
const stub: DurableObjectStub = Reflect.apply(target, thisArg, argArray)
return instrumentBindingStub(stub, nsName)
},
}
return wrap(getFn, getHandler)
}
export function instrumentDOBinding(ns: DurableObjectNamespace, nsName: string) {
const nsHandler: ProxyHandler<typeof ns> = {
get(target, prop) {
if (prop === 'get') {
const fn = Reflect.get(ns, prop)
return instrumentBindingGet(fn, nsName)
} else {
return passthroughGet(target, prop)
}
},
}
return wrap(ns, nsHandler)
}
export function instrumentState(state: DurableObjectState) {
const stateHandler: ProxyHandler<DurableObjectState> = {
get(target, prop, receiver) {
const result = Reflect.get(target, prop, unwrap(receiver))
if (prop === 'storage') {
return instrumentStorage(result)
} else if (typeof result === 'function') {
return result.bind(target)
} else {
return result
}
},
}
return wrap(state, stateHandler)
}
let cold_start = true
export type DOClass = { new (state: DurableObjectState, env: any): DurableObject }
export function executeDOFetch(fetchFn: FetchFn, request: Request, id: DurableObjectId): Promise<Response> {
const spanContext = getParentContextFromHeaders(request.headers)
const tracer = trace.getTracer('DO fetchHandler')
const attributes = {
[SemanticAttributes.FAAS_TRIGGER]: 'http',
[SemanticAttributes.FAAS_COLDSTART]: cold_start,
}
cold_start = false
Object.assign(attributes, gatherRequestAttributes(request))
Object.assign(attributes, gatherIncomingCfAttributes(request))
const options: SpanOptions = {
attributes,
kind: SpanKind.SERVER,
}
const name = id.name || ''
const promise = tracer.startActiveSpan(`do.fetchHandler:${name}`, options, spanContext, async (span) => {
try {
const response: Response = await fetchFn(request)
if (response.ok) {
span.setStatus({ code: SpanStatusCode.OK })
}
span.setAttributes(gatherResponseAttributes(response))
span.end()
return response
} catch (error) {
span.recordException(error as Exception)
span.setStatus({ code: SpanStatusCode.ERROR })
span.end()
throw error
}
})
return promise
}
export function executeDOAlarm(alarmFn: NonNullable<AlarmFn>, id: DurableObjectId): Promise<void> {
const tracer = trace.getTracer('DO alarmHandler')
const name = id.name || ''
const promise = tracer.startActiveSpan(`do.alarmHandler:${name}`, async (span) => {
span.setAttribute(SemanticAttributes.FAAS_COLDSTART, cold_start)
cold_start = false
span.setAttribute('do.id', id.toString())
if (id.name) span.setAttribute('do.name', id.name)
try {
await alarmFn()
span.end()
} catch (error) {
span.recordException(error as Exception)
span.setStatus({ code: SpanStatusCode.ERROR })
span.end()
throw error
}
})
return promise
}
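// Wraps the object's fetch method: initialises config per call, runs the handler under an
// OTel context, and exports spans afterwards.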
function instrumentFetchFn(fetchFn: FetchFn, initialiser: Initialiser, env: Env, id: DurableObjectId): FetchFn {
const fetchHandler: ProxyHandler<FetchFn> = {
async apply(target, thisArg, argArray: Parameters<FetchFn>) {
const request = argArray[0]
const config = initialiser(env, request)
| const context = setConfig(config)
try { |
const bound = target.bind(unwrap(thisArg))
return await api_context.with(context, executeDOFetch, undefined, bound, request, id)
} catch (error) {
throw error
} finally {
exportSpans()
}
},
}
return wrap(fetchFn, fetchHandler)
}
function instrumentAlarmFn(alarmFn: AlarmFn, initialiser: Initialiser, env: Env, id: DurableObjectId) {
if (!alarmFn) return undefined
const alarmHandler: ProxyHandler<NonNullable<AlarmFn>> = {
async apply(target, thisArg) {
const config = initialiser(env, 'do-alarm')
const context = setConfig(config)
try {
const bound = target.bind(unwrap(thisArg))
return await api_context.with(context, executeDOAlarm, undefined, bound, id)
} catch (error) {
throw error
} finally {
exportSpans()
}
},
}
return wrap(alarmFn, alarmHandler)
}
function instrumentDurableObject(doObj: DurableObject, initialiser: Initialiser, env: Env, state: DurableObjectState) {
const objHandler: ProxyHandler<DurableObject> = {
get(target, prop) {
if (prop === 'fetch') {
const fetchFn = Reflect.get(target, prop)
return instrumentFetchFn(fetchFn, initialiser, env, state.id)
} else if (prop === 'alarm') {
const alarmFn = Reflect.get(target, prop)
return instrumentAlarmFn(alarmFn, initialiser, env, state.id)
} else {
const result = Reflect.get(target, prop)
if (typeof result === 'function') {
					// return the bound function so `this` refers to the original object, not the proxy
					return result.bind(doObj)
}
return result
}
},
}
return wrap(doObj, objHandler)
}
export function instrumentDOClass(doClass: DOClass, initialiser: Initialiser): DOClass {
const classHandler: ProxyHandler<DOClass> = {
construct(target, [orig_state, orig_env]: ConstructorParameters<DOClass>) {
const trigger: DOConstructorTrigger = {
id: orig_state.id.toString(),
name: orig_state.id.name,
}
const constructorConfig = initialiser(orig_env, trigger)
const context = setConfig(constructorConfig)
const state = instrumentState(orig_state)
const env = instrumentEnv(orig_env)
const createDO = () => {
return new target(state, env)
}
const doObj = api_context.with(context, createDO)
return instrumentDurableObject(doObj, initialiser, env, state)
},
}
return wrap(doClass, classHandler)
}
| src/instrumentation/do.ts | evanderkoogh-otel-cf-workers-18fd741 | [
{
"filename": "src/instrumentation/fetch.ts",
"retrieved_chunk": "\t\tapply: async (target, _thisArg, argArray: Parameters<FetchHandler>): Promise<Response> => {\n\t\t\tconst [request, orig_env, orig_ctx] = argArray\n\t\t\tconst config = initialiser(orig_env as Record<string, unknown>, request)\n\t\t\tconst env = instrumentEnv(orig_env as Record<string, unknown>)\n\t\t\tconst { ctx, tracker } = proxyExecutionContext(orig_ctx)\n\t\t\tconst context = setConfig(config)\n\t\t\ttry {\n\t\t\t\tconst args: FetchHandlerArgs = [request, env, ctx]\n\t\t\t\treturn await api_context.with(context, executeFetchHandler, undefined, target, args)\n\t\t\t} catch (error) {",
"score": 48.69795057246753
},
{
"filename": "src/instrumentation/queue.ts",
"retrieved_chunk": "\t\t\tconst [batch, orig_env, orig_ctx] = argArray\n\t\t\tconst config = initialiser(orig_env as Record<string, unknown>, batch)\n\t\t\tconst env = instrumentEnv(orig_env as Record<string, unknown>)\n\t\t\tconst { ctx, tracker } = proxyExecutionContext(orig_ctx)\n\t\t\tconst context = setConfig(config)\n\t\t\ttry {\n\t\t\t\tconst args: QueueHandlerArgs = [batch, env, ctx]\n\t\t\t\treturn await api_context.with(context, executeQueueHandler, undefined, target, args)\n\t\t\t} catch (error) {\n\t\t\t\tthrow error",
"score": 36.399776329547095
},
{
"filename": "src/instrumentation/queue.ts",
"retrieved_chunk": "\t\t\tcount.retryRemaining()\n\t\t\tspan.end()\n\t\t\tthrow error\n\t\t}\n\t})\n\treturn promise\n}\nexport function createQueueHandler(queueFn: QueueHandler, initialiser: Initialiser) {\n\tconst queueHandler: ProxyHandler<QueueHandler> = {\n\t\tasync apply(target, _thisArg, argArray: Parameters<QueueHandler>): Promise<void> {",
"score": 35.2783781660228
},
{
"filename": "src/instrumentation/fetch.ts",
"retrieved_chunk": "\tfetchFn: Fetcher['fetch'],\n\tconfigFn: getFetchConfig,\n\tattrs?: Attributes\n): Fetcher['fetch'] {\n\tconst handler: ProxyHandler<typeof fetch> = {\n\t\tapply: (target, thisArg, argArray): ReturnType<typeof fetch> => {\n\t\t\tconst workerConfig = getActiveConfig()\n\t\t\tconst config = configFn(workerConfig)\n\t\t\tconst request = new Request(argArray[0], argArray[1])\n\t\t\tconst tracer = trace.getTracer('fetcher')",
"score": 32.79997706388592
},
{
"filename": "src/instrumentation/fetch.ts",
"retrieved_chunk": "\tconst promise = tracer.startActiveSpan('fetchHandler', options, spanContext, async (span) => {\n\t\ttry {\n\t\t\tconst response: Response = await fetchFn(request, env, ctx)\n\t\t\tif (response.status < 500) {\n\t\t\t\tspan.setStatus({ code: SpanStatusCode.OK })\n\t\t\t}\n\t\t\tspan.setAttributes(gatherResponseAttributes(response))\n\t\t\tspan.end()\n\t\t\treturn response\n\t\t} catch (error) {",
"score": 30.56847141397522
}
] | typescript | const context = setConfig(config)
try { |
import { trace, SpanOptions, SpanKind, Attributes, Exception, context as api_context } from '@opentelemetry/api'
import { Initialiser, setConfig } from '../config.js'
import { exportSpans, proxyExecutionContext } from './common.js'
import { instrumentEnv } from './env.js'
import { unwrap, wrap } from '../wrap.js'
type QueueHandler = ExportedHandlerQueueHandler<unknown, unknown>
export type QueueHandlerArgs = Parameters<QueueHandler>
const traceIdSymbol = Symbol('traceId')
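// Tracks how many messages in the batch were acked or retried so batch-level outcome
// attributes can be added to the consumer span.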
class MessageStatusCount {
succeeded = 0
failed = 0
readonly total: number
constructor(total: number) {
this.total = total
}
ack() {
this.succeeded = this.succeeded + 1
}
ackRemaining() {
this.succeeded = this.total - this.failed
}
retry() {
this.failed = this.failed + 1
}
retryRemaining() {
this.failed = this.total - this.succeeded
}
toAttributes(): Attributes {
return {
'queue.messages_count': this.total,
'queue.messages_success': this.succeeded,
'queue.messages_failed': this.failed,
'queue.batch_success': this.succeeded === this.total,
}
}
}
const addEvent = (name: string, msg?: Message) => {
const attrs: Attributes = {}
if (msg) {
attrs['queue.message_id'] = msg.id
attrs['queue.message_timestamp'] = msg.timestamp.toISOString()
}
trace.getActiveSpan()?.addEvent(name, attrs)
}
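// Wraps a single message so explicit ack()/retry() calls are recorded as span events and counted.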
const proxyQueueMessage = <Q>(msg: Message<Q>, count: MessageStatusCount): Message<Q> => {
const msgHandler: ProxyHandler<Message<Q>> = {
get: (target, prop) => {
if (prop === 'ack') {
const ackFn = Reflect.get(target, prop)
return new Proxy(ackFn, {
apply: (fnTarget) => {
addEvent('messageAck', msg)
count.ack()
//TODO: handle errors
Reflect.apply(fnTarget, msg, [])
},
})
} else if (prop === 'retry') {
const retryFn = Reflect.get(target, prop)
return new Proxy(retryFn, {
apply: (fnTarget) => {
addEvent('messageRetry', msg)
count.retry()
//TODO: handle errors
const result = Reflect.apply(fnTarget, msg, [])
return result
},
})
} else {
return Reflect.get(target, prop, msg)
}
},
}
return wrap(msg, msgHandler)
}
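// Wraps the MessageBatch: individual messages, ackAll and retryAll all go through the
// instrumented proxies above.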
const proxyMessageBatch = <E, Q>(batch: MessageBatch, count: MessageStatusCount) => {
const batchHandler: ProxyHandler<MessageBatch> = {
get: (target, prop) => {
if (prop === 'messages') {
const messages = Reflect.get(target, prop)
const messagesHandler: ProxyHandler<MessageBatch['messages']> = {
get: (target, prop) => {
if (typeof prop === 'string' && !isNaN(parseInt(prop))) {
const message = Reflect.get(target, prop)
return proxyQueueMessage(message, count)
} else {
return Reflect.get(target, prop)
}
},
}
return wrap(messages, messagesHandler)
} else if (prop === 'ackAll') {
const ackFn = Reflect.get(target, prop)
return new Proxy(ackFn, {
apply: (fnTarget) => {
addEvent('ackAll')
count.ackRemaining()
//TODO: handle errors
Reflect.apply(fnTarget, batch, [])
},
})
} else if (prop === 'retryAll') {
const retryFn = Reflect.get(target, prop)
return new Proxy(retryFn, {
apply: (fnTarget) => {
addEvent('retryAll')
count.retryRemaining()
//TODO: handle errors
Reflect.apply(fnTarget, batch, [])
},
})
}
return Reflect.get(target, prop)
},
}
return wrap(batch, batchHandler)
}
export function executeQueueHandler(queueFn: QueueHandler, [batch, env, ctx]: QueueHandlerArgs): Promise<void> {
const count = new MessageStatusCount(batch.messages.length)
batch = proxyMessageBatch(batch, count)
const tracer = trace.getTracer('queueHandler')
const options: SpanOptions = {
attributes: {
'queue.name': batch.queue,
},
kind: SpanKind.CONSUMER,
}
const promise = tracer.startActiveSpan(`queueHandler:${batch.queue}`, options, async (span) => {
const traceId = span.spanContext().traceId
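		// Note: Context.setValue returns a new immutable context; the returned context is not captured here.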
api_context.active().setValue(traceIdSymbol, traceId)
try {
			// await the handler so explicit acks/retries are counted (and errors caught) before the span ends
			const result = await queueFn(batch, env, ctx)
			span.setAttribute('queue.implicitly_acked', count.total - count.succeeded - count.failed)
count.ackRemaining()
span.setAttributes(count.toAttributes())
span.end()
return result
} catch (error) {
span.recordException(error as Exception)
span.setAttribute('queue.implicitly_retried', count.total - count.succeeded - count.failed)
count.retryRemaining()
span.end()
throw error
}
})
return promise
}
| export function createQueueHandler(queueFn: QueueHandler, initialiser: Initialiser) { |
const queueHandler: ProxyHandler<QueueHandler> = {
async apply(target, _thisArg, argArray: Parameters<QueueHandler>): Promise<void> {
const [batch, orig_env, orig_ctx] = argArray
const config = initialiser(orig_env as Record<string, unknown>, batch)
const env = instrumentEnv(orig_env as Record<string, unknown>)
const { ctx, tracker } = proxyExecutionContext(orig_ctx)
const context = setConfig(config)
try {
const args: QueueHandlerArgs = [batch, env, ctx]
return await api_context.with(context, executeQueueHandler, undefined, target, args)
} catch (error) {
throw error
} finally {
orig_ctx.waitUntil(exportSpans(tracker))
}
},
}
return wrap(queueFn, queueHandler)
}
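// Producer side: wrap Queue.send in a span that records the queue operation.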
function instrumentQueueSend(fn: Queue<unknown>['send'], name: string): Queue<unknown>['send'] {
const tracer = trace.getTracer('queueSender')
const handler: ProxyHandler<Queue<unknown>['send']> = {
apply: (target, thisArg, argArray) => {
return tracer.startActiveSpan(`queueSend: ${name}`, async (span) => {
span.setAttribute('queue.operation', 'send')
await Reflect.apply(target, unwrap(thisArg), argArray)
span.end()
})
},
}
return wrap(fn, handler)
}
function instrumentQueueSendBatch(fn: Queue<unknown>['sendBatch'], name: string): Queue<unknown>['sendBatch'] {
const tracer = trace.getTracer('queueSender')
const handler: ProxyHandler<Queue<unknown>['sendBatch']> = {
apply: (target, thisArg, argArray) => {
return tracer.startActiveSpan(`queueSendBatch: ${name}`, async (span) => {
span.setAttribute('queue.operation', 'sendBatch')
await Reflect.apply(target, unwrap(thisArg), argArray)
span.end()
})
},
}
return wrap(fn, handler)
}
export function instrumentQueueSender(queue: Queue<unknown>, name: string) {
const queueHandler: ProxyHandler<Queue<unknown>> = {
get: (target, prop) => {
if (prop === 'send') {
const sendFn = Reflect.get(target, prop)
return instrumentQueueSend(sendFn, name)
} else if (prop === 'sendBatch') {
const sendFn = Reflect.get(target, prop)
return instrumentQueueSendBatch(sendFn, name)
} else {
return Reflect.get(target, prop)
}
},
}
return wrap(queue, queueHandler)
}
| src/instrumentation/queue.ts | evanderkoogh-otel-cf-workers-18fd741 | [
{
"filename": "src/instrumentation/fetch.ts",
"retrieved_chunk": "\t\t\tspan.recordException(error as Exception)\n\t\t\tspan.setStatus({ code: SpanStatusCode.ERROR })\n\t\t\tspan.end()\n\t\t\tthrow error\n\t\t}\n\t})\n\treturn promise\n}\nexport function createFetchHandler(fetchFn: FetchHandler, initialiser: Initialiser) {\n\tconst fetchHandler: ProxyHandler<FetchHandler> = {",
"score": 43.662883223188764
},
{
"filename": "src/instrumentation/do.ts",
"retrieved_chunk": "\t\t} catch (error) {\n\t\t\tspan.recordException(error as Exception)\n\t\t\tspan.setStatus({ code: SpanStatusCode.ERROR })\n\t\t\tspan.end()\n\t\t\tthrow error\n\t\t}\n\t})\n\treturn promise\n}\nfunction instrumentFetchFn(fetchFn: FetchFn, initialiser: Initialiser, env: Env, id: DurableObjectId): FetchFn {",
"score": 42.84547403973148
},
{
"filename": "src/instrumentation/do.ts",
"retrieved_chunk": "\t\t} catch (error) {\n\t\t\tspan.recordException(error as Exception)\n\t\t\tspan.setStatus({ code: SpanStatusCode.ERROR })\n\t\t\tspan.end()\n\t\t\tthrow error\n\t\t}\n\t})\n\treturn promise\n}\nexport function executeDOAlarm(alarmFn: NonNullable<AlarmFn>, id: DurableObjectId): Promise<void> {",
"score": 38.50492249965925
},
{
"filename": "src/sdk.ts",
"retrieved_chunk": "\tconst initialiser = createInitialiser(config)\n\tif (handler.fetch) {\n\t\tconst fetcher = unwrap(handler.fetch) as FetchHandler\n\t\thandler.fetch = createFetchHandler(fetcher, initialiser)\n\t}\n\tif (handler.queue) {\n\t\tconst queuer = unwrap(handler.queue) as QueueHandler\n\t\thandler.queue = createQueueHandler(queuer, initialiser)\n\t}\n\treturn handler",
"score": 22.184127698955276
},
{
"filename": "src/instrumentation/fetch.ts",
"retrieved_chunk": "\tconst promise = tracer.startActiveSpan('fetchHandler', options, spanContext, async (span) => {\n\t\ttry {\n\t\t\tconst response: Response = await fetchFn(request, env, ctx)\n\t\t\tif (response.status < 500) {\n\t\t\t\tspan.setStatus({ code: SpanStatusCode.OK })\n\t\t\t}\n\t\t\tspan.setAttributes(gatherResponseAttributes(response))\n\t\t\tspan.end()\n\t\t\treturn response\n\t\t} catch (error) {",
"score": 20.1663105173622
}
] | typescript | export function createQueueHandler(queueFn: QueueHandler, initialiser: Initialiser) { |
import { propagation } from '@opentelemetry/api'
import { W3CTraceContextPropagator } from '@opentelemetry/core'
import { Resource } from '@opentelemetry/resources'
import { SemanticResourceAttributes } from '@opentelemetry/semantic-conventions'
import {
AlwaysOnSampler,
ParentBasedSampler,
ReadableSpan,
Sampler,
SpanExporter,
TraceIdRatioBasedSampler,
} from '@opentelemetry/sdk-trace-base'
import { Initialiser } from './config.js'
import { OTLPExporter } from './exporter.js'
import { WorkerTracerProvider } from './provider.js'
import { isHeadSampled, isRootErrorSpan, multiTailSampler } from './sampling.js'
import { BatchTraceSpanProcessor } from './spanprocessor.js'
import { Trigger, TraceConfig, ResolvedTraceConfig, ExporterConfig, ParentRatioSamplingConfig } from './types.js'
import { unwrap } from './wrap.js'
import { createFetchHandler, instrumentGlobalFetch } from './instrumentation/fetch.js'
import { instrumentGlobalCache } from './instrumentation/cache.js'
import { createQueueHandler } from './instrumentation/queue.js'
import { DOClass, instrumentDOClass } from './instrumentation/do.js'
type FetchHandler = ExportedHandlerFetchHandler<unknown, unknown>
type QueueHandler = ExportedHandlerQueueHandler
export type ResolveConfigFn = (env: any, trigger: Trigger) => TraceConfig
export type ConfigurationOption = TraceConfig | ResolveConfigFn
export function isRequest(trigger: Trigger): trigger is Request {
return trigger instanceof Request
}
export function isMessageBatch(trigger: Trigger): trigger is MessageBatch {
return !!(trigger as MessageBatch).ackAll
}
export function isAlarm(trigger: Trigger): trigger is 'do-alarm' {
return trigger === 'do-alarm'
}
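// Builds the OTel Resource describing the Worker runtime, merged with the user-supplied
// service name/namespace/version.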
const createResource = (config: ResolvedTraceConfig): Resource => {
const workerResourceAttrs = {
[SemanticResourceAttributes.CLOUD_PROVIDER]: 'cloudflare',
[SemanticResourceAttributes.CLOUD_PLATFORM]: 'cloudflare.workers',
[SemanticResourceAttributes.CLOUD_REGION]: 'earth',
// [SemanticResourceAttributes.FAAS_NAME]: '//TODO',
// [SemanticResourceAttributes.FAAS_VERSION]: '//TODO',
[SemanticResourceAttributes.FAAS_MAX_MEMORY]: 128,
[SemanticResourceAttributes.TELEMETRY_SDK_LANGUAGE]: 'JavaScript',
[SemanticResourceAttributes.TELEMETRY_SDK_NAME]: '@microlabs/otel-workers-sdk',
}
const serviceResource = new Resource({
[SemanticResourceAttributes.SERVICE_NAME]: config.service.name,
[SemanticResourceAttributes.SERVICE_NAMESPACE]: config.service.namespace,
[SemanticResourceAttributes.SERVICE_VERSION]: config.service.version,
})
const resource = new Resource(workerResourceAttrs)
return resource.merge(serviceResource)
}
function isSpanExporter(exporterConfig: ExporterConfig): exporterConfig is SpanExporter {
return !!(exporterConfig as SpanExporter).export
}
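// Global instrumentation (fetch, caches, propagator, tracer provider) must only be installed once per isolate.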
let initialised = false
function init(config: ResolvedTraceConfig): void {
if (!initialised) {
instrumentGlobalCache()
instrumentGlobalFetch()
propagation.setGlobalPropagator(new W3CTraceContextPropagator())
const resource = createResource(config)
const spanProcessor = new BatchTraceSpanProcessor()
| const provider = new WorkerTracerProvider(spanProcessor, resource)
provider.register()
initialised = true
} |
}
function isSampler(sampler: Sampler | ParentRatioSamplingConfig): sampler is Sampler {
return !!(sampler as Sampler).shouldSample
}
function createSampler(conf: ParentRatioSamplingConfig): Sampler {
const ratioSampler = new TraceIdRatioBasedSampler(conf.ratio)
if (typeof conf.acceptRemote === 'boolean' && !conf.acceptRemote) {
return new ParentBasedSampler({
root: ratioSampler,
remoteParentSampled: ratioSampler,
remoteParentNotSampled: ratioSampler,
})
} else {
return new ParentBasedSampler({ root: ratioSampler })
}
}
function parseConfig(supplied: TraceConfig): ResolvedTraceConfig {
const headSampleConf = supplied.sampling?.headSampler
const headSampler = headSampleConf
? isSampler(headSampleConf)
? headSampleConf
: createSampler(headSampleConf)
: new AlwaysOnSampler()
return {
exporter: isSpanExporter(supplied.exporter) ? supplied.exporter : new OTLPExporter(supplied.exporter),
fetch: {
includeTraceContext: supplied.fetch?.includeTraceContext ?? true,
},
handlers: {
fetch: {
acceptTraceContext: supplied.handlers?.fetch?.acceptTraceContext ?? true,
},
},
postProcessor: supplied.postProcessor || ((spans: ReadableSpan[]) => spans),
sampling: {
headSampler,
tailSampler: supplied.sampling?.tailSampler || multiTailSampler([isHeadSampled, isRootErrorSpan]),
},
service: supplied.service,
}
}
function createInitialiser(config: ConfigurationOption): Initialiser {
if (typeof config === 'function') {
return (env, trigger) => {
const conf = parseConfig(config(env, trigger))
init(conf)
return conf
}
} else {
return () => {
const conf = parseConfig(config)
init(conf)
return conf
}
}
}
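// Entry point for module-syntax Workers: wraps the handler's fetch and queue exports with tracing initialisation.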
export function instrument<E, Q, C>(
handler: ExportedHandler<E, Q, C>,
config: ConfigurationOption
): ExportedHandler<E, Q, C> {
const initialiser = createInitialiser(config)
if (handler.fetch) {
const fetcher = unwrap(handler.fetch) as FetchHandler
handler.fetch = createFetchHandler(fetcher, initialiser)
}
if (handler.queue) {
const queuer = unwrap(handler.queue) as QueueHandler
handler.queue = createQueueHandler(queuer, initialiser)
}
return handler
}
export function instrumentDO(doClass: DOClass, config: ConfigurationOption) {
const initialiser = createInitialiser(config)
return instrumentDOClass(doClass, initialiser)
}
export { waitUntilTrace } from './instrumentation/fetch.js'
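// --- Usage sketch (illustrative only, not part of this module) ---
// The import path, exporter URL and service name below are assumptions; the `exporter`
// shape follows the OTLPExporter config used above ({ url, headers? }).
//
//   import { instrument, type ResolveConfigFn } from '@microlabs/otel-cf-workers'
//
//   const handler = {
//     async fetch(request: Request): Promise<Response> {
//       return new Response('hello')
//     },
//   }
//
//   const config: ResolveConfigFn = (env: any) => ({
//     exporter: { url: 'https://collector.example.com/v1/traces' },
//     service: { name: 'my-worker' },
//   })
//
//   export default instrument(handler, config)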
| src/sdk.ts | evanderkoogh-otel-cf-workers-18fd741 | [
{
"filename": "src/provider.ts",
"retrieved_chunk": " * @param config Configuration object for SDK registration\n */\nexport class WorkerTracerProvider implements TracerProvider {\n\tprivate spanProcessor: SpanProcessor\n\tprivate resource: Resource\n\tprivate tracers: Record<string, Tracer> = {}\n\tconstructor(spanProcessor: SpanProcessor, resource: Resource) {\n\t\tthis.spanProcessor = spanProcessor\n\t\tthis.resource = resource\n\t}",
"score": 29.835767376657067
},
{
"filename": "src/provider.ts",
"retrieved_chunk": "\tgetTracer(name: string, version?: string, options?: TracerOptions): Tracer {\n\t\tconst key = `${name}@${version || ''}:${options?.schemaUrl || ''}`\n\t\tif (!this.tracers[key]) {\n\t\t\tthis.tracers[key] = new WorkerTracer(this.spanProcessor, this.resource)\n\t\t}\n\t\treturn this.tracers[key]\n\t}\n\tregister(): void {\n\t\ttrace.setGlobalTracerProvider(this)\n\t\tcontext.setGlobalContextManager(new AsyncLocalStorageContextManager())",
"score": 27.42893508794482
},
{
"filename": "src/tracer.ts",
"retrieved_chunk": "\t\tthis._spanProcessor = spanProcessor\n\t\tthis.resource = resource\n\t}\n\tget spanProcessor() {\n\t\treturn this._spanProcessor\n\t}\n\taddToResource(extra: Resource) {\n\t\tthis.resource.merge(extra)\n\t}\n\tstartSpan(name: string, options: SpanOptions = {}, context = api_context.active()): Span {",
"score": 20.454980089955633
},
{
"filename": "src/instrumentation/fetch.ts",
"retrieved_chunk": "\t\t\tconst options: SpanOptions = { kind: SpanKind.CLIENT, attributes: attrs }\n\t\t\tconst host = new URL(request.url).host\n\t\t\tconst spanName = typeof attrs?.['name'] === 'string' ? attrs?.['name'] : `fetch: ${host}`\n\t\t\tconst promise = tracer.startActiveSpan(spanName, options, async (span) => {\n\t\t\t\tconst includeTraceContext =\n\t\t\t\t\ttypeof config.includeTraceContext === 'function'\n\t\t\t\t\t\t? config.includeTraceContext(request)\n\t\t\t\t\t\t: config.includeTraceContext\n\t\t\t\tif (includeTraceContext ?? true) {\n\t\t\t\t\tpropagation.inject(api_context.active(), request.headers, {",
"score": 19.044711304257966
},
{
"filename": "src/tracer.ts",
"retrieved_chunk": "import { sanitizeAttributes } from '@opentelemetry/core'\nimport { Resource } from '@opentelemetry/resources'\nimport { SpanProcessor, RandomIdGenerator, ReadableSpan, SamplingDecision } from '@opentelemetry/sdk-trace-base'\nimport { SpanImpl } from './span.js'\nimport { getActiveConfig } from './config.js'\nexport class WorkerTracer implements Tracer {\n\tprivate readonly _spanProcessor: SpanProcessor\n\tprivate readonly resource: Resource\n\tprivate readonly idGenerator: RandomIdGenerator = new RandomIdGenerator()\n\tconstructor(spanProcessor: SpanProcessor, resource: Resource) {",
"score": 18.86513168470985
}
] | typescript | const provider = new WorkerTracerProvider(spanProcessor, resource)
provider.register()
initialised = true
} |
import { propagation } from '@opentelemetry/api'
import { W3CTraceContextPropagator } from '@opentelemetry/core'
import { Resource } from '@opentelemetry/resources'
import { SemanticResourceAttributes } from '@opentelemetry/semantic-conventions'
import {
AlwaysOnSampler,
ParentBasedSampler,
ReadableSpan,
Sampler,
SpanExporter,
TraceIdRatioBasedSampler,
} from '@opentelemetry/sdk-trace-base'
import { Initialiser } from './config.js'
import { OTLPExporter } from './exporter.js'
import { WorkerTracerProvider } from './provider.js'
import { isHeadSampled, isRootErrorSpan, multiTailSampler } from './sampling.js'
import { BatchTraceSpanProcessor } from './spanprocessor.js'
import { Trigger, TraceConfig, ResolvedTraceConfig, ExporterConfig, ParentRatioSamplingConfig } from './types.js'
import { unwrap } from './wrap.js'
import { createFetchHandler, instrumentGlobalFetch } from './instrumentation/fetch.js'
import { instrumentGlobalCache } from './instrumentation/cache.js'
import { createQueueHandler } from './instrumentation/queue.js'
import { DOClass, instrumentDOClass } from './instrumentation/do.js'
type FetchHandler = ExportedHandlerFetchHandler<unknown, unknown>
type QueueHandler = ExportedHandlerQueueHandler
export type ResolveConfigFn = (env: any, trigger: Trigger) => TraceConfig
export type ConfigurationOption = TraceConfig | ResolveConfigFn
export function isRequest(trigger: Trigger): trigger is Request {
return trigger instanceof Request
}
export function isMessageBatch(trigger: Trigger): trigger is MessageBatch {
return !!(trigger as MessageBatch).ackAll
}
export function isAlarm(trigger: Trigger): trigger is 'do-alarm' {
return trigger === 'do-alarm'
}
const createResource = (config: ResolvedTraceConfig): Resource => {
const workerResourceAttrs = {
[SemanticResourceAttributes.CLOUD_PROVIDER]: 'cloudflare',
[SemanticResourceAttributes.CLOUD_PLATFORM]: 'cloudflare.workers',
[SemanticResourceAttributes.CLOUD_REGION]: 'earth',
// [SemanticResourceAttributes.FAAS_NAME]: '//TODO',
// [SemanticResourceAttributes.FAAS_VERSION]: '//TODO',
[SemanticResourceAttributes.FAAS_MAX_MEMORY]: 128,
[SemanticResourceAttributes.TELEMETRY_SDK_LANGUAGE]: 'JavaScript',
[SemanticResourceAttributes.TELEMETRY_SDK_NAME]: '@microlabs/otel-workers-sdk',
}
const serviceResource = new Resource({
[SemanticResourceAttributes.SERVICE_NAME]: config.service.name,
[SemanticResourceAttributes.SERVICE_NAMESPACE]: config.service.namespace,
[SemanticResourceAttributes.SERVICE_VERSION]: config.service.version,
})
const resource = new Resource(workerResourceAttrs)
return resource.merge(serviceResource)
}
function isSpanExporter(exporterConfig: ExporterConfig): exporterConfig is SpanExporter {
return !!(exporterConfig as SpanExporter).export
}
let initialised = false
function init(config: ResolvedTraceConfig): void {
if (!initialised) {
instrumentGlobalCache()
instrumentGlobalFetch()
propagation.setGlobalPropagator(new W3CTraceContextPropagator())
const resource = createResource(config)
const spanProcessor = new BatchTraceSpanProcessor()
const provider = new WorkerTracerProvider(spanProcessor, resource)
provider.register()
initialised = true
}
}
function isSampler(sampler: Sampler | ParentRatioSamplingConfig): sampler is Sampler {
return !!(sampler as Sampler).shouldSample
}
function createSampler(conf: ParentRatioSamplingConfig): Sampler {
const ratioSampler = new TraceIdRatioBasedSampler(conf.ratio)
if (typeof conf.acceptRemote === 'boolean' && !conf.acceptRemote) {
return new ParentBasedSampler({
root: ratioSampler,
remoteParentSampled: ratioSampler,
remoteParentNotSampled: ratioSampler,
})
} else {
return new ParentBasedSampler({ root: ratioSampler })
}
}
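// Fills in defaults: exporter, trace-context propagation flags, post-processor and head/tail samplers.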
function parseConfig(supplied: TraceConfig): ResolvedTraceConfig {
const headSampleConf = supplied.sampling?.headSampler
const headSampler = headSampleConf
? isSampler(headSampleConf)
? headSampleConf
: createSampler(headSampleConf)
: new AlwaysOnSampler()
return {
exporter: isSpanExporter(supplied.exporter) ? supplied.exporter : new OTLPExporter(supplied.exporter),
fetch: {
includeTraceContext: supplied.fetch?.includeTraceContext ?? true,
},
handlers: {
fetch: {
acceptTraceContext: supplied.handlers?.fetch?.acceptTraceContext ?? true,
},
},
postProcessor: supplied.postProcessor || ((spans: ReadableSpan[]) => spans),
sampling: {
headSampler,
tailSampler: supplied.sampling?.tailSampler || multiTailSampler([isHeadSampled, isRootErrorSpan]),
},
service: supplied.service,
}
}
function createInitialiser(config: ConfigurationOption): Initialiser {
if (typeof config === 'function') {
return (env, trigger) => {
const conf = parseConfig(config(env, trigger))
init(conf)
return conf
}
} else {
return () => {
const conf = parseConfig(config)
init(conf)
return conf
}
}
}
export function instrument<E, Q, C>(
handler: ExportedHandler<E, Q, C>,
config: ConfigurationOption
): ExportedHandler<E, Q, C> {
const initialiser = createInitialiser(config)
if (handler.fetch) {
const fetcher = unwrap(handler.fetch) as FetchHandler
handler.fetch = createFetchHandler(fetcher, initialiser)
}
if (handler.queue) {
const queuer = unwrap(handler.queue) as QueueHandler
handler.queue = createQueueHandler(queuer, initialiser)
}
return handler
}
| export function instrumentDO(doClass: DOClass, config: ConfigurationOption) { |
const initialiser = createInitialiser(config)
return instrumentDOClass(doClass, initialiser)
}
export { waitUntilTrace } from './instrumentation/fetch.js'
| src/sdk.ts | evanderkoogh-otel-cf-workers-18fd741 | [
{
"filename": "src/instrumentation/fetch.ts",
"retrieved_chunk": "\tfetchFn: Fetcher['fetch'],\n\tconfigFn: getFetchConfig,\n\tattrs?: Attributes\n): Fetcher['fetch'] {\n\tconst handler: ProxyHandler<typeof fetch> = {\n\t\tapply: (target, thisArg, argArray): ReturnType<typeof fetch> => {\n\t\t\tconst workerConfig = getActiveConfig()\n\t\t\tconst config = configFn(workerConfig)\n\t\t\tconst request = new Request(argArray[0], argArray[1])\n\t\t\tconst tracer = trace.getTracer('fetcher')",
"score": 35.02158871150252
},
{
"filename": "src/instrumentation/fetch.ts",
"retrieved_chunk": "\t\t\treturn promise\n\t\t},\n\t}\n\treturn wrap(fetchFn, handler, true)\n}\nexport function instrumentGlobalFetch(): void {\n\tglobalThis.fetch = instrumentFetcher(globalThis.fetch, (config) => config.fetch)\n}",
"score": 33.965354096612586
},
{
"filename": "src/wrap.ts",
"retrieved_chunk": "\t}\n\tproxyHandler.apply = (target, thisArg, argArray) => {\n\t\tif (handler.apply) {\n\t\t\treturn handler.apply(unwrap(target), unwrap(thisArg), argArray)\n\t\t}\n\t}\n\treturn new Proxy(item, proxyHandler)\n}\nexport function unwrap<T extends object>(item: T): T {\n\tif (item && isWrapped(item)) {",
"score": 32.40347036912067
},
{
"filename": "src/instrumentation/queue.ts",
"retrieved_chunk": "\t\tapply: (target, thisArg, argArray) => {\n\t\t\treturn tracer.startActiveSpan(`queueSend: ${name}`, async (span) => {\n\t\t\t\tspan.setAttribute('queue.operation', 'send')\n\t\t\t\tawait Reflect.apply(target, unwrap(thisArg), argArray)\n\t\t\t\tspan.end()\n\t\t\t})\n\t\t},\n\t}\n\treturn wrap(fn, handler)\n}",
"score": 30.93348752667858
},
{
"filename": "src/wrap.ts",
"retrieved_chunk": "const unwrapSymbol = Symbol('unwrap')\ntype Wrapped<T> = { [unwrapSymbol]: T } & T\nexport function isWrapped<T>(item: T): item is Wrapped<T> {\n\treturn !!(item as Wrapped<T>)[unwrapSymbol]\n}\nexport function wrap<T extends object>(item: T, handler: ProxyHandler<T>, autoPassthrough: boolean = true): T {\n\tif (isWrapped(item)) {\n\t\treturn item\n\t}\n\tconst proxyHandler = Object.assign({}, handler)",
"score": 30.251358071334174
}
] | typescript | export function instrumentDO(doClass: DOClass, config: ConfigurationOption) { |
import { propagation } from '@opentelemetry/api'
import { W3CTraceContextPropagator } from '@opentelemetry/core'
import { Resource } from '@opentelemetry/resources'
import { SemanticResourceAttributes } from '@opentelemetry/semantic-conventions'
import {
AlwaysOnSampler,
ParentBasedSampler,
ReadableSpan,
Sampler,
SpanExporter,
TraceIdRatioBasedSampler,
} from '@opentelemetry/sdk-trace-base'
import { Initialiser } from './config.js'
import { OTLPExporter } from './exporter.js'
import { WorkerTracerProvider } from './provider.js'
import { isHeadSampled, isRootErrorSpan, multiTailSampler } from './sampling.js'
import { BatchTraceSpanProcessor } from './spanprocessor.js'
import { Trigger, TraceConfig, ResolvedTraceConfig, ExporterConfig, ParentRatioSamplingConfig } from './types.js'
import { unwrap } from './wrap.js'
import { createFetchHandler, instrumentGlobalFetch } from './instrumentation/fetch.js'
import { instrumentGlobalCache } from './instrumentation/cache.js'
import { createQueueHandler } from './instrumentation/queue.js'
import { DOClass, instrumentDOClass } from './instrumentation/do.js'
type FetchHandler = ExportedHandlerFetchHandler<unknown, unknown>
type QueueHandler = ExportedHandlerQueueHandler
export type ResolveConfigFn = (env: any, trigger: Trigger) => TraceConfig
export type ConfigurationOption = TraceConfig | ResolveConfigFn
export function isRequest(trigger: Trigger): trigger is Request {
return trigger instanceof Request
}
export function isMessageBatch(trigger: Trigger): trigger is MessageBatch {
return !!(trigger as MessageBatch).ackAll
}
export function isAlarm(trigger: Trigger): trigger is 'do-alarm' {
return trigger === 'do-alarm'
}
const createResource = (config: ResolvedTraceConfig): Resource => {
const workerResourceAttrs = {
[SemanticResourceAttributes.CLOUD_PROVIDER]: 'cloudflare',
[SemanticResourceAttributes.CLOUD_PLATFORM]: 'cloudflare.workers',
[SemanticResourceAttributes.CLOUD_REGION]: 'earth',
// [SemanticResourceAttributes.FAAS_NAME]: '//TODO',
// [SemanticResourceAttributes.FAAS_VERSION]: '//TODO',
[SemanticResourceAttributes.FAAS_MAX_MEMORY]: 128,
[SemanticResourceAttributes.TELEMETRY_SDK_LANGUAGE]: 'JavaScript',
[SemanticResourceAttributes.TELEMETRY_SDK_NAME]: '@microlabs/otel-workers-sdk',
}
const serviceResource = new Resource({
[SemanticResourceAttributes.SERVICE_NAME]: config.service.name,
[SemanticResourceAttributes.SERVICE_NAMESPACE]: config.service.namespace,
[SemanticResourceAttributes.SERVICE_VERSION]: config.service.version,
})
const resource = new Resource(workerResourceAttrs)
return resource.merge(serviceResource)
}
function isSpanExporter(exporterConfig: ExporterConfig): exporterConfig is SpanExporter {
return !!(exporterConfig as SpanExporter).export
}
let initialised = false
function init(config: ResolvedTraceConfig): void {
if (!initialised) {
instrumentGlobalCache()
instrumentGlobalFetch()
propagation.setGlobalPropagator(new W3CTraceContextPropagator())
const resource = createResource(config)
const spanProcessor = new BatchTraceSpanProcessor()
const provider = new WorkerTracerProvider(spanProcessor, resource)
provider.register()
initialised = true
}
}
function isSampler(sampler: Sampler | ParentRatioSamplingConfig): sampler is Sampler {
return !!(sampler as Sampler).shouldSample
}
function createSampler(conf: ParentRatioSamplingConfig): Sampler {
const ratioSampler = new TraceIdRatioBasedSampler(conf.ratio)
if (typeof conf.acceptRemote === 'boolean' && !conf.acceptRemote) {
return new ParentBasedSampler({
root: ratioSampler,
remoteParentSampled: ratioSampler,
remoteParentNotSampled: ratioSampler,
})
} else {
return new ParentBasedSampler({ root: ratioSampler })
}
}
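// Resolves a user-supplied TraceConfig into a ResolvedTraceConfig, filling in defaults: an OTLPExporter built
// from exporter options, trace-context propagation enabled, an identity post-processor, an AlwaysOn head sampler
// and a tail sampler that keeps head-sampled or root-error traces.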
function parseConfig(supplied: TraceConfig): ResolvedTraceConfig {
const headSampleConf = supplied.sampling?.headSampler
const headSampler = headSampleConf
? isSampler(headSampleConf)
? headSampleConf
: createSampler(headSampleConf)
: new AlwaysOnSampler()
return {
exporter: isSpanExporter(supplied.exporter) ? supplied.exporter : new OTLPExporter(supplied.exporter),
fetch: {
includeTraceContext: supplied.fetch?.includeTraceContext ?? true,
},
handlers: {
fetch: {
acceptTraceContext: supplied.handlers?.fetch?.acceptTraceContext ?? true,
},
},
postProcessor: supplied.postProcessor || ((spans: ReadableSpan[]) => spans),
sampling: {
headSampler,
tailSampler: supplied.sampling?.tailSampler || multiTailSampler([isHeadSampled, isRootErrorSpan]),
},
service: supplied.service,
}
}
| function createInitialiser(config: ConfigurationOption): Initialiser { |
if (typeof config === 'function') {
return (env, trigger) => {
const conf = parseConfig(config(env, trigger))
init(conf)
return conf
}
} else {
return () => {
const conf = parseConfig(config)
init(conf)
return conf
}
}
}
export function instrument<E, Q, C>(
handler: ExportedHandler<E, Q, C>,
config: ConfigurationOption
): ExportedHandler<E, Q, C> {
const initialiser = createInitialiser(config)
if (handler.fetch) {
const fetcher = unwrap(handler.fetch) as FetchHandler
handler.fetch = createFetchHandler(fetcher, initialiser)
}
if (handler.queue) {
const queuer = unwrap(handler.queue) as QueueHandler
handler.queue = createQueueHandler(queuer, initialiser)
}
return handler
}
export function instrumentDO(doClass: DOClass, config: ConfigurationOption) {
const initialiser = createInitialiser(config)
return instrumentDOClass(doClass, initialiser)
}
export { waitUntilTrace } from './instrumentation/fetch.js'
| src/sdk.ts | evanderkoogh-otel-cf-workers-18fd741 | [
{
"filename": "src/types.ts",
"retrieved_chunk": "\theadSampler?: HS\n\ttailSampler?: TailSampleFn\n}\nexport interface TraceConfig<EC extends ExporterConfig = ExporterConfig> {\n\texporter: EC\n\thandlers?: HandlerConfig\n\tfetch?: FetcherConfig\n\tpostProcessor?: PostProcessorFn\n\tsampling?: SamplingConfig\n\tservice: ServiceConfig",
"score": 40.71626468966795
},
{
"filename": "src/spanprocessor.ts",
"retrieved_chunk": "}\nfunction startExport(currentState: TraceCompleteState, { args }: StartExportAction): ExportingState | DoneState {\n\tconst { exporter, tailSampler, postProcessor } = args\n\tconst { traceId, localRootSpan, completedSpans: spans } = currentState\n\tconst shouldExport = tailSampler({ traceId, localRootSpan, spans })\n\tif (shouldExport) {\n\t\tconst exportSpans = postProcessor(spans)\n\t\tconst promise = new Promise<ExportResult>((resolve) => {\n\t\t\texporter.export(exportSpans, resolve)\n\t\t})",
"score": 31.095149081770685
},
{
"filename": "src/spanprocessor.ts",
"retrieved_chunk": "\t\t\tthis.traces.set(traceId, newState)\n\t\t}\n\t\treturn newState\n\t}\n\tprivate export(traceId: string) {\n\t\tconst { exporter, sampling, postProcessor } = getActiveConfig()\n\t\tconst exportArgs = { exporter, tailSampler: sampling.tailSampler, postProcessor }\n\t\tconst newState = this.action(traceId, { actionName: 'startExport', args: exportArgs })\n\t\tif (newState.stateName === 'exporting') {\n\t\t\tconst promise = newState.promise",
"score": 28.133884215138043
},
{
"filename": "src/spanprocessor.ts",
"retrieved_chunk": "type StartExportArguments = {\n\texporter: SpanExporter\n\ttailSampler: TailSampleFn\n\tpostProcessor: PostProcessorFn\n}\ntype StartSpanAction = Action<'startSpan', { span: Span }>\ntype EndSpanAction = Action<'endSpan', { span: ReadableSpan }>\ntype StartExportAction = Action<'startExport', { args: StartExportArguments }>\nfunction newTrace(currentState: InitialState, { span }: StartSpanAction): InProgressTraceState {\n\tconst spanId = span.spanContext().spanId",
"score": 16.89964921356581
},
{
"filename": "src/sampling.ts",
"retrieved_chunk": "import { TraceFlags, SpanStatusCode } from '@opentelemetry/api'\nimport { ReadableSpan } from '@opentelemetry/sdk-trace-base'\nexport interface LocalTrace {\n\treadonly traceId: string\n\treadonly localRootSpan: ReadableSpan\n\treadonly spans: ReadableSpan[]\n}\nexport type TailSampleFn = (traceInfo: LocalTrace) => boolean\nexport function multiTailSampler(samplers: TailSampleFn[]): TailSampleFn {\n\treturn (traceInfo) => {",
"score": 16.01888335123503
}
] | typescript | function createInitialiser(config: ConfigurationOption): Initialiser { |
import { trace, SpanOptions, SpanKind, Attributes, Exception, context as api_context } from '@opentelemetry/api'
import { Initialiser, setConfig } from '../config.js'
import { exportSpans, proxyExecutionContext } from './common.js'
import { instrumentEnv } from './env.js'
import { unwrap, wrap } from '../wrap.js'
type QueueHandler = ExportedHandlerQueueHandler<unknown, unknown>
export type QueueHandlerArgs = Parameters<QueueHandler>
const traceIdSymbol = Symbol('traceId')
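// Tracks explicit ack/retry outcomes for a message batch so the handler span can report totals
// and whether the whole batch succeeded.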
class MessageStatusCount {
succeeded = 0
failed = 0
readonly total: number
constructor(total: number) {
this.total = total
}
ack() {
this.succeeded = this.succeeded + 1
}
ackRemaining() {
this.succeeded = this.total - this.failed
}
retry() {
this.failed = this.failed + 1
}
retryRemaining() {
this.failed = this.total - this.succeeded
}
toAttributes(): Attributes {
return {
'queue.messages_count': this.total,
'queue.messages_success': this.succeeded,
'queue.messages_failed': this.failed,
'queue.batch_success': this.succeeded === this.total,
}
}
}
const addEvent = (name: string, msg?: Message) => {
const attrs: Attributes = {}
if (msg) {
attrs['queue.message_id'] = msg.id
attrs['queue.message_timestamp'] = msg.timestamp.toISOString()
}
trace.getActiveSpan()?.addEvent(name, attrs)
}
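// Wraps a single queue message so ack()/retry() calls are counted and recorded as span events
// before being forwarded to the underlying message.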
const proxyQueueMessage = <Q>(msg: Message<Q>, count: MessageStatusCount): Message<Q> => {
const msgHandler: ProxyHandler<Message<Q>> = {
get: (target, prop) => {
if (prop === 'ack') {
const ackFn = Reflect.get(target, prop)
return new Proxy(ackFn, {
apply: (fnTarget) => {
addEvent('messageAck', msg)
count.ack()
//TODO: handle errors
Reflect.apply(fnTarget, msg, [])
},
})
} else if (prop === 'retry') {
const retryFn = Reflect.get(target, prop)
return new Proxy(retryFn, {
apply: (fnTarget) => {
addEvent('messageRetry', msg)
count.retry()
//TODO: handle errors
const result = Reflect.apply(fnTarget, msg, [])
return result
},
})
} else {
return Reflect.get(target, prop, msg)
}
},
}
return wrap(msg, msgHandler)
}
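// Wraps the MessageBatch: individual messages are proxied on access, and ackAll()/retryAll()
// mark all remaining messages as succeeded/failed before delegating to the real batch.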
const proxyMessageBatch = <E, Q>(batch: MessageBatch, count: MessageStatusCount) => {
const batchHandler: ProxyHandler<MessageBatch> = {
get: (target, prop) => {
if (prop === 'messages') {
const messages = Reflect.get(target, prop)
const messagesHandler: ProxyHandler<MessageBatch['messages']> = {
get: (target, prop) => {
if (typeof prop === 'string' && !isNaN(parseInt(prop))) {
const message = Reflect.get(target, prop)
return proxyQueueMessage(message, count)
} else {
return Reflect.get(target, prop)
}
},
}
return wrap(messages, messagesHandler)
} else if (prop === 'ackAll') {
const ackFn = Reflect.get(target, prop)
return new Proxy(ackFn, {
apply: (fnTarget) => {
addEvent('ackAll')
count.ackRemaining()
//TODO: handle errors
Reflect.apply(fnTarget, batch, [])
},
})
} else if (prop === 'retryAll') {
const retryFn = Reflect.get(target, prop)
return new Proxy(retryFn, {
apply: (fnTarget) => {
addEvent('retryAll')
count.retryRemaining()
//TODO: handle errors
Reflect.apply(fnTarget, batch, [])
},
})
}
return Reflect.get(target, prop)
},
}
return wrap(batch, batchHandler)
}
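// Runs the user's queue handler inside a consumer span; messages never explicitly acked or retried
// are counted as implicitly acked on success and implicitly retried on failure.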
export function executeQueueHandler(queueFn: QueueHandler, [batch, env, ctx]: QueueHandlerArgs): Promise<void> {
const count = new MessageStatusCount(batch.messages.length)
batch = proxyMessageBatch(batch, count)
const tracer = trace.getTracer('queueHandler')
const options: SpanOptions = {
attributes: {
'queue.name': batch.queue,
},
kind: SpanKind.CONSUMER,
}
const promise = tracer.startActiveSpan(`queueHandler:${batch.queue}`, options, async (span) => {
const traceId = span.spanContext().traceId
api_context.active().setValue(traceIdSymbol, traceId)
try {
			const result = await queueFn(batch, env, ctx)
			span.setAttribute('queue.implicitly_acked', count.total - count.succeeded - count.failed)
count.ackRemaining()
span.setAttributes(count.toAttributes())
span.end()
return result
} catch (error) {
span.recordException(error as Exception)
span.setAttribute('queue.implicitly_retried', count.total - count.succeeded - count.failed)
count.retryRemaining()
span.end()
throw error
}
})
return promise
}
export function createQueueHandler(queueFn: QueueHandler, initialiser: Initialiser) {
const queueHandler: ProxyHandler<QueueHandler> = {
async apply(target, _thisArg, argArray: Parameters<QueueHandler>): Promise<void> {
const [batch, orig_env, orig_ctx] = argArray
const config = initialiser(orig_env as Record<string, unknown>, batch)
const env = instrumentEnv(orig_env as Record<string, unknown>)
const { ctx, tracker } = proxyExecutionContext(orig_ctx)
const context = setConfig(config)
try {
const args: QueueHandlerArgs = [batch, env, ctx]
return await api_context.with(context, executeQueueHandler, undefined, target, args)
} catch (error) {
throw error
} finally {
orig_ctx.waitUntil(exportSpans(tracker))
}
},
}
return wrap(queueFn, queueHandler)
}
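// Traces outbound Queue sends: send() and sendBatch() on an instrumented queue binding are each
// wrapped in their own span tagged with the queue operation.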
function instrumentQueueSend(fn: Queue<unknown>['send'], name: string): Queue<unknown>['send'] {
const tracer = trace.getTracer('queueSender')
const handler: ProxyHandler<Queue<unknown>['send']> = {
apply: (target, thisArg, argArray) => {
return tracer.startActiveSpan(`queueSend: ${name}`, async (span) => {
span.setAttribute('queue.operation', 'send')
| await Reflect.apply(target, unwrap(thisArg), argArray)
span.end()
})
},
} |
return wrap(fn, handler)
}
function instrumentQueueSendBatch(fn: Queue<unknown>['sendBatch'], name: string): Queue<unknown>['sendBatch'] {
const tracer = trace.getTracer('queueSender')
const handler: ProxyHandler<Queue<unknown>['sendBatch']> = {
apply: (target, thisArg, argArray) => {
return tracer.startActiveSpan(`queueSendBatch: ${name}`, async (span) => {
span.setAttribute('queue.operation', 'sendBatch')
await Reflect.apply(target, unwrap(thisArg), argArray)
span.end()
})
},
}
return wrap(fn, handler)
}
export function instrumentQueueSender(queue: Queue<unknown>, name: string) {
const queueHandler: ProxyHandler<Queue<unknown>> = {
get: (target, prop) => {
if (prop === 'send') {
const sendFn = Reflect.get(target, prop)
return instrumentQueueSend(sendFn, name)
} else if (prop === 'sendBatch') {
const sendFn = Reflect.get(target, prop)
return instrumentQueueSendBatch(sendFn, name)
} else {
return Reflect.get(target, prop)
}
},
}
return wrap(queue, queueHandler)
}
| src/instrumentation/queue.ts | evanderkoogh-otel-cf-workers-18fd741 | [
{
"filename": "src/instrumentation/do-storage.ts",
"retrieved_chunk": "\t\t\treturn tracer.startActiveSpan(`do:storage:${operation}`, options, async (span) => {\n\t\t\t\tconst result = await Reflect.apply(target, thisArg, argArray)\n\t\t\t\tconst extraAttrs = StorageAttributes[operation] ? StorageAttributes[operation](argArray, result) : {}\n\t\t\t\tspan.setAttributes(extraAttrs)\n\t\t\t\tspan.setAttribute('hasResult', !!result)\n\t\t\t\tspan.end()\n\t\t\t\treturn result\n\t\t\t})\n\t\t},\n\t}",
"score": 54.3350832653589
},
{
"filename": "src/instrumentation/kv.ts",
"retrieved_chunk": "\t\t\t\tattributes: {\n\t\t\t\t\tbinding_type: 'KV',\n\t\t\t\t\tkv_namespace: name,\n\t\t\t\t\toperation,\n\t\t\t\t},\n\t\t\t}\n\t\t\treturn tracer.startActiveSpan(`kv:${name}:${operation}`, options, async (span) => {\n\t\t\t\tconst result = await Reflect.apply(target, thisArg, argArray)\n\t\t\t\tconst extraAttrs = KVAttributes[operation] ? KVAttributes[operation](argArray, result) : {}\n\t\t\t\tspan.setAttributes(extraAttrs)",
"score": 50.03415363381011
},
{
"filename": "src/instrumentation/cache.ts",
"retrieved_chunk": "\t\t\t\t'http.url': argArray[0].url ? sanitiseURL(argArray[0].url) : undefined,\n\t\t\t\t'cache.operation': op,\n\t\t\t}\n\t\t\tconst options: SpanOptions = { kind: SpanKind.CLIENT, attributes }\n\t\t\treturn tracer.startActiveSpan(`cache:${cacheName}:${op}`, options, async (span) => {\n\t\t\t\tconst result = await Reflect.apply(target, thisArg, argArray)\n\t\t\t\tif (op === 'match') {\n\t\t\t\t\tspan.setAttribute('cache.hit', !result)\n\t\t\t\t}\n\t\t\t\tspan.end()",
"score": 46.26701224167642
},
{
"filename": "src/instrumentation/cache.ts",
"retrieved_chunk": "\tconst handler: ProxyHandler<typeof openFn> = {\n\t\tasync apply(target, thisArg, argArray) {\n\t\t\tconst cacheName = argArray[0]\n\t\t\tconst cache = await Reflect.apply(target, thisArg, argArray)\n\t\t\treturn instrumentCache(cache, cacheName)\n\t\t},\n\t}\n\treturn wrap(openFn, handler)\n}\nfunction _instrumentGlobalCache() {",
"score": 39.77391333533383
},
{
"filename": "src/instrumentation/do.ts",
"retrieved_chunk": "\tconst tracer = trace.getTracer('DO alarmHandler')\n\tconst name = id.name || ''\n\tconst promise = tracer.startActiveSpan(`do.alarmHandler:${name}`, async (span) => {\n\t\tspan.setAttribute(SemanticAttributes.FAAS_COLDSTART, cold_start)\n\t\tcold_start = false\n\t\tspan.setAttribute('do.id', id.toString())\n\t\tif (id.name) span.setAttribute('do.name', id.name)\n\t\ttry {\n\t\t\tawait alarmFn()\n\t\t\tspan.end()",
"score": 39.26628421120381
}
] | typescript | await Reflect.apply(target, unwrap(thisArg), argArray)
span.end()
})
},
} |
import { propagation } from '@opentelemetry/api'
import { W3CTraceContextPropagator } from '@opentelemetry/core'
import { Resource } from '@opentelemetry/resources'
import { SemanticResourceAttributes } from '@opentelemetry/semantic-conventions'
import {
AlwaysOnSampler,
ParentBasedSampler,
ReadableSpan,
Sampler,
SpanExporter,
TraceIdRatioBasedSampler,
} from '@opentelemetry/sdk-trace-base'
import { Initialiser } from './config.js'
import { OTLPExporter } from './exporter.js'
import { WorkerTracerProvider } from './provider.js'
import { isHeadSampled, isRootErrorSpan, multiTailSampler } from './sampling.js'
import { BatchTraceSpanProcessor } from './spanprocessor.js'
import { Trigger, TraceConfig, ResolvedTraceConfig, ExporterConfig, ParentRatioSamplingConfig } from './types.js'
import { unwrap } from './wrap.js'
import { createFetchHandler, instrumentGlobalFetch } from './instrumentation/fetch.js'
import { instrumentGlobalCache } from './instrumentation/cache.js'
import { createQueueHandler } from './instrumentation/queue.js'
import { DOClass, instrumentDOClass } from './instrumentation/do.js'
type FetchHandler = ExportedHandlerFetchHandler<unknown, unknown>
type QueueHandler = ExportedHandlerQueueHandler
export type ResolveConfigFn = (env: any, trigger: Trigger) => TraceConfig
export type ConfigurationOption = TraceConfig | ResolveConfigFn
export function isRequest(trigger: Trigger): trigger is Request {
return trigger instanceof Request
}
export function isMessageBatch(trigger: Trigger): trigger is MessageBatch {
return !!(trigger as MessageBatch).ackAll
}
export function isAlarm(trigger: Trigger): trigger is 'do-alarm' {
return trigger === 'do-alarm'
}
const createResource = (config: ResolvedTraceConfig): Resource => {
const workerResourceAttrs = {
[SemanticResourceAttributes.CLOUD_PROVIDER]: 'cloudflare',
[SemanticResourceAttributes.CLOUD_PLATFORM]: 'cloudflare.workers',
[SemanticResourceAttributes.CLOUD_REGION]: 'earth',
// [SemanticResourceAttributes.FAAS_NAME]: '//TODO',
// [SemanticResourceAttributes.FAAS_VERSION]: '//TODO',
[SemanticResourceAttributes.FAAS_MAX_MEMORY]: 128,
[SemanticResourceAttributes.TELEMETRY_SDK_LANGUAGE]: 'JavaScript',
[SemanticResourceAttributes.TELEMETRY_SDK_NAME]: '@microlabs/otel-workers-sdk',
}
const serviceResource = new Resource({
[SemanticResourceAttributes.SERVICE_NAME]: config.service.name,
[SemanticResourceAttributes.SERVICE_NAMESPACE]: config.service.namespace,
[SemanticResourceAttributes.SERVICE_VERSION]: config.service.version,
})
const resource = new Resource(workerResourceAttrs)
return resource.merge(serviceResource)
}
function isSpanExporter(exporterConfig: ExporterConfig): exporterConfig is SpanExporter {
return !!(exporterConfig as SpanExporter).export
}
let initialised = false
function init(config: ResolvedTraceConfig): void {
if (!initialised) {
instrumentGlobalCache()
instrumentGlobalFetch()
propagation.setGlobalPropagator(new W3CTraceContextPropagator())
const resource = createResource(config)
const spanProcessor = new BatchTraceSpanProcessor()
const provider = new WorkerTracerProvider(spanProcessor, resource)
provider.register()
initialised = true
}
}
function isSampler(sampler: Sampler | ParentRatioSamplingConfig): sampler is Sampler {
return !!(sampler as Sampler).shouldSample
}
function createSampler(conf: ParentRatioSamplingConfig): Sampler {
const ratioSampler = new TraceIdRatioBasedSampler(conf.ratio)
if (typeof conf.acceptRemote === 'boolean' && !conf.acceptRemote) {
return new ParentBasedSampler({
root: ratioSampler,
remoteParentSampled: ratioSampler,
remoteParentNotSampled: ratioSampler,
})
} else {
return new ParentBasedSampler({ root: ratioSampler })
}
}
function parseConfig(supplied: TraceConfig): ResolvedTraceConfig {
const headSampleConf = supplied.sampling?.headSampler
const headSampler = headSampleConf
? isSampler(headSampleConf)
? headSampleConf
: createSampler(headSampleConf)
: new AlwaysOnSampler()
return {
exporter: isSpanExporter(supplied.exporter) ? supplied.exporter : new OTLPExporter(supplied.exporter),
fetch: {
includeTraceContext: supplied.fetch?.includeTraceContext ?? true,
},
handlers: {
fetch: {
acceptTraceContext: supplied.handlers?.fetch?.acceptTraceContext ?? true,
},
},
postProcessor: supplied.postProcessor || ((spans: ReadableSpan[]) => spans),
sampling: {
headSampler,
| tailSampler: supplied.sampling?.tailSampler || multiTailSampler([isHeadSampled, isRootErrorSpan]),
},
service: supplied.service,
} |
}
function createInitialiser(config: ConfigurationOption): Initialiser {
if (typeof config === 'function') {
return (env, trigger) => {
const conf = parseConfig(config(env, trigger))
init(conf)
return conf
}
} else {
return () => {
const conf = parseConfig(config)
init(conf)
return conf
}
}
}
export function instrument<E, Q, C>(
handler: ExportedHandler<E, Q, C>,
config: ConfigurationOption
): ExportedHandler<E, Q, C> {
const initialiser = createInitialiser(config)
if (handler.fetch) {
const fetcher = unwrap(handler.fetch) as FetchHandler
handler.fetch = createFetchHandler(fetcher, initialiser)
}
if (handler.queue) {
const queuer = unwrap(handler.queue) as QueueHandler
handler.queue = createQueueHandler(queuer, initialiser)
}
return handler
}
export function instrumentDO(doClass: DOClass, config: ConfigurationOption) {
const initialiser = createInitialiser(config)
return instrumentDOClass(doClass, initialiser)
}
export { waitUntilTrace } from './instrumentation/fetch.js'
| src/sdk.ts | evanderkoogh-otel-cf-workers-18fd741 | [
{
"filename": "src/types.ts",
"retrieved_chunk": "\theadSampler?: HS\n\ttailSampler?: TailSampleFn\n}\nexport interface TraceConfig<EC extends ExporterConfig = ExporterConfig> {\n\texporter: EC\n\thandlers?: HandlerConfig\n\tfetch?: FetcherConfig\n\tpostProcessor?: PostProcessorFn\n\tsampling?: SamplingConfig\n\tservice: ServiceConfig",
"score": 48.27095193466855
},
{
"filename": "src/spanprocessor.ts",
"retrieved_chunk": "}\nfunction startExport(currentState: TraceCompleteState, { args }: StartExportAction): ExportingState | DoneState {\n\tconst { exporter, tailSampler, postProcessor } = args\n\tconst { traceId, localRootSpan, completedSpans: spans } = currentState\n\tconst shouldExport = tailSampler({ traceId, localRootSpan, spans })\n\tif (shouldExport) {\n\t\tconst exportSpans = postProcessor(spans)\n\t\tconst promise = new Promise<ExportResult>((resolve) => {\n\t\t\texporter.export(exportSpans, resolve)\n\t\t})",
"score": 30.461828898463366
},
{
"filename": "src/spanprocessor.ts",
"retrieved_chunk": "\t\t\tthis.traces.set(traceId, newState)\n\t\t}\n\t\treturn newState\n\t}\n\tprivate export(traceId: string) {\n\t\tconst { exporter, sampling, postProcessor } = getActiveConfig()\n\t\tconst exportArgs = { exporter, tailSampler: sampling.tailSampler, postProcessor }\n\t\tconst newState = this.action(traceId, { actionName: 'startExport', args: exportArgs })\n\t\tif (newState.stateName === 'exporting') {\n\t\t\tconst promise = newState.promise",
"score": 28.133884215138043
},
{
"filename": "src/instrumentation/fetch.ts",
"retrieved_chunk": "\tconst workerConfig = getActiveConfig()\n\tconst acceptTraceContext =\n\t\ttypeof workerConfig.handlers.fetch.acceptTraceContext === 'function'\n\t\t\t? workerConfig.handlers.fetch.acceptTraceContext(request)\n\t\t\t: workerConfig.handlers.fetch.acceptTraceContext ?? true\n\treturn acceptTraceContext ? getParentContextFromHeaders(request.headers) : api_context.active()\n}\nexport function waitUntilTrace(fn: () => Promise<any>): Promise<void> {\n\tconst tracer = trace.getTracer('waitUntil')\n\treturn tracer.startActiveSpan('waitUntil', async (span) => {",
"score": 27.550179839867443
},
{
"filename": "src/types.ts",
"retrieved_chunk": "}\nexport interface ResolvedTraceConfig extends TraceConfig {\n\texporter: SpanExporter\n\thandlers: Required<HandlerConfig>\n\tfetch: Required<FetcherConfig>\n\tpostProcessor: PostProcessorFn\n\tsampling: Required<SamplingConfig<Sampler>>\n}\nexport interface DOConstructorTrigger {\n\tid: string",
"score": 22.8660097424778
}
] | typescript | tailSampler: supplied.sampling?.tailSampler || multiTailSampler([isHeadSampled, isRootErrorSpan]),
},
service: supplied.service,
} |
import { propagation } from '@opentelemetry/api'
import { W3CTraceContextPropagator } from '@opentelemetry/core'
import { Resource } from '@opentelemetry/resources'
import { SemanticResourceAttributes } from '@opentelemetry/semantic-conventions'
import {
AlwaysOnSampler,
ParentBasedSampler,
ReadableSpan,
Sampler,
SpanExporter,
TraceIdRatioBasedSampler,
} from '@opentelemetry/sdk-trace-base'
import { Initialiser } from './config.js'
import { OTLPExporter } from './exporter.js'
import { WorkerTracerProvider } from './provider.js'
import { isHeadSampled, isRootErrorSpan, multiTailSampler } from './sampling.js'
import { BatchTraceSpanProcessor } from './spanprocessor.js'
import { Trigger, TraceConfig, ResolvedTraceConfig, ExporterConfig, ParentRatioSamplingConfig } from './types.js'
import { unwrap } from './wrap.js'
import { createFetchHandler, instrumentGlobalFetch } from './instrumentation/fetch.js'
import { instrumentGlobalCache } from './instrumentation/cache.js'
import { createQueueHandler } from './instrumentation/queue.js'
import { DOClass, instrumentDOClass } from './instrumentation/do.js'
type FetchHandler = ExportedHandlerFetchHandler<unknown, unknown>
type QueueHandler = ExportedHandlerQueueHandler
export type ResolveConfigFn = (env: any, trigger: Trigger) => TraceConfig
export type ConfigurationOption = TraceConfig | ResolveConfigFn
export function isRequest(trigger: Trigger): trigger is Request {
return trigger instanceof Request
}
export function isMessageBatch(trigger: Trigger): trigger is MessageBatch {
return !!(trigger as MessageBatch).ackAll
}
export function isAlarm(trigger: Trigger): trigger is 'do-alarm' {
return trigger === 'do-alarm'
}
const createResource = (config: ResolvedTraceConfig): Resource => {
const workerResourceAttrs = {
[SemanticResourceAttributes.CLOUD_PROVIDER]: 'cloudflare',
[SemanticResourceAttributes.CLOUD_PLATFORM]: 'cloudflare.workers',
[SemanticResourceAttributes.CLOUD_REGION]: 'earth',
// [SemanticResourceAttributes.FAAS_NAME]: '//TODO',
// [SemanticResourceAttributes.FAAS_VERSION]: '//TODO',
[SemanticResourceAttributes.FAAS_MAX_MEMORY]: 128,
[SemanticResourceAttributes.TELEMETRY_SDK_LANGUAGE]: 'JavaScript',
[SemanticResourceAttributes.TELEMETRY_SDK_NAME]: '@microlabs/otel-workers-sdk',
}
const serviceResource = new Resource({
[SemanticResourceAttributes.SERVICE_NAME]: config.service.name,
[SemanticResourceAttributes.SERVICE_NAMESPACE]: config.service.namespace,
[SemanticResourceAttributes.SERVICE_VERSION]: config.service.version,
})
const resource = new Resource(workerResourceAttrs)
return resource.merge(serviceResource)
}
function isSpanExporter(exporterConfig: ExporterConfig): exporterConfig is SpanExporter {
return !!(exporterConfig as SpanExporter).export
}
let initialised = false
function init(config: ResolvedTraceConfig): void {
if (!initialised) {
instrumentGlobalCache()
instrumentGlobalFetch()
propagation.setGlobalPropagator(new W3CTraceContextPropagator())
const resource = createResource(config)
const spanProcessor = new BatchTraceSpanProcessor()
const provider = new WorkerTracerProvider(spanProcessor, resource)
provider.register()
initialised = true
}
}
function isSampler(sampler: Sampler | ParentRatioSamplingConfig): sampler is Sampler {
return !!(sampler as Sampler).shouldSample
}
function createSampler(conf: ParentRatioSamplingConfig): Sampler {
const ratioSampler = new TraceIdRatioBasedSampler(conf.ratio)
if (typeof conf.acceptRemote === 'boolean' && !conf.acceptRemote) {
return new ParentBasedSampler({
root: ratioSampler,
remoteParentSampled: ratioSampler,
remoteParentNotSampled: ratioSampler,
})
} else {
return new ParentBasedSampler({ root: ratioSampler })
}
}
function parseConfig(supplied: TraceConfig): ResolvedTraceConfig {
const headSampleConf = supplied.sampling?.headSampler
const headSampler = headSampleConf
? isSampler(headSampleConf)
? headSampleConf
: createSampler(headSampleConf)
: new AlwaysOnSampler()
return {
exporter: isSpanExporter(supplied.exporter) ? supplied.exporter : new OTLPExporter(supplied.exporter),
fetch: {
includeTraceContext: supplied.fetch?.includeTraceContext ?? true,
},
handlers: {
fetch: {
acceptTraceContext: supplied.handlers?.fetch?.acceptTraceContext ?? true,
},
},
postProcessor: supplied.postProcessor || ((spans: ReadableSpan[]) => spans),
sampling: {
headSampler,
tailSampler: supplied.sampling?.tailSampler || multiTailSampler([isHeadSampled, isRootErrorSpan]),
},
service: supplied.service,
}
}
function createInitialiser(config: ConfigurationOption): Initialiser {
if (typeof config === 'function') {
return | (env, trigger) => { |
const conf = parseConfig(config(env, trigger))
init(conf)
return conf
}
} else {
return () => {
const conf = parseConfig(config)
init(conf)
return conf
}
}
}
export function instrument<E, Q, C>(
handler: ExportedHandler<E, Q, C>,
config: ConfigurationOption
): ExportedHandler<E, Q, C> {
const initialiser = createInitialiser(config)
if (handler.fetch) {
const fetcher = unwrap(handler.fetch) as FetchHandler
handler.fetch = createFetchHandler(fetcher, initialiser)
}
if (handler.queue) {
const queuer = unwrap(handler.queue) as QueueHandler
handler.queue = createQueueHandler(queuer, initialiser)
}
return handler
}
export function instrumentDO(doClass: DOClass, config: ConfigurationOption) {
const initialiser = createInitialiser(config)
return instrumentDOClass(doClass, initialiser)
}
export { waitUntilTrace } from './instrumentation/fetch.js'
| src/sdk.ts | evanderkoogh-otel-cf-workers-18fd741 | [
{
"filename": "src/types.ts",
"retrieved_chunk": "\theadSampler?: HS\n\ttailSampler?: TailSampleFn\n}\nexport interface TraceConfig<EC extends ExporterConfig = ExporterConfig> {\n\texporter: EC\n\thandlers?: HandlerConfig\n\tfetch?: FetcherConfig\n\tpostProcessor?: PostProcessorFn\n\tsampling?: SamplingConfig\n\tservice: ServiceConfig",
"score": 28.779998699570587
},
{
"filename": "src/config.ts",
"retrieved_chunk": "import { context } from '@opentelemetry/api'\nimport { ResolvedTraceConfig, Trigger } from './types.js'\nconst configSymbol = Symbol('Otel Workers Tracing Configuration')\nexport type Initialiser = (env: Record<string, unknown>, trigger: Trigger) => ResolvedTraceConfig\nexport function setConfig(config: ResolvedTraceConfig, ctx = context.active()) {\n\treturn ctx.setValue(configSymbol, config)\n}\nexport function getActiveConfig(): ResolvedTraceConfig {\n\tconst config = context.active().getValue(configSymbol) as ResolvedTraceConfig\n\treturn config",
"score": 17.362506022347596
},
{
"filename": "src/spanprocessor.ts",
"retrieved_chunk": "\t\t\tthis.traces.set(traceId, newState)\n\t\t}\n\t\treturn newState\n\t}\n\tprivate export(traceId: string) {\n\t\tconst { exporter, sampling, postProcessor } = getActiveConfig()\n\t\tconst exportArgs = { exporter, tailSampler: sampling.tailSampler, postProcessor }\n\t\tconst newState = this.action(traceId, { actionName: 'startExport', args: exportArgs })\n\t\tif (newState.stateName === 'exporting') {\n\t\t\tconst promise = newState.promise",
"score": 15.935594662044062
},
{
"filename": "src/spanprocessor.ts",
"retrieved_chunk": "}\nfunction startExport(currentState: TraceCompleteState, { args }: StartExportAction): ExportingState | DoneState {\n\tconst { exporter, tailSampler, postProcessor } = args\n\tconst { traceId, localRootSpan, completedSpans: spans } = currentState\n\tconst shouldExport = tailSampler({ traceId, localRootSpan, spans })\n\tif (shouldExport) {\n\t\tconst exportSpans = postProcessor(spans)\n\t\tconst promise = new Promise<ExportResult>((resolve) => {\n\t\t\texporter.export(exportSpans, resolve)\n\t\t})",
"score": 11.852465297256208
},
{
"filename": "src/instrumentation/fetch.ts",
"retrieved_chunk": "\t\t\treturn promise\n\t\t},\n\t}\n\treturn wrap(fetchFn, handler, true)\n}\nexport function instrumentGlobalFetch(): void {\n\tglobalThis.fetch = instrumentFetcher(globalThis.fetch, (config) => config.fetch)\n}",
"score": 11.212282500291755
}
] | typescript | (env, trigger) => { |
import { propagation } from '@opentelemetry/api'
import { W3CTraceContextPropagator } from '@opentelemetry/core'
import { Resource } from '@opentelemetry/resources'
import { SemanticResourceAttributes } from '@opentelemetry/semantic-conventions'
import {
AlwaysOnSampler,
ParentBasedSampler,
ReadableSpan,
Sampler,
SpanExporter,
TraceIdRatioBasedSampler,
} from '@opentelemetry/sdk-trace-base'
import { Initialiser } from './config.js'
import { OTLPExporter } from './exporter.js'
import { WorkerTracerProvider } from './provider.js'
import { isHeadSampled, isRootErrorSpan, multiTailSampler } from './sampling.js'
import { BatchTraceSpanProcessor } from './spanprocessor.js'
import { Trigger, TraceConfig, ResolvedTraceConfig, ExporterConfig, ParentRatioSamplingConfig } from './types.js'
import { unwrap } from './wrap.js'
import { createFetchHandler, instrumentGlobalFetch } from './instrumentation/fetch.js'
import { instrumentGlobalCache } from './instrumentation/cache.js'
import { createQueueHandler } from './instrumentation/queue.js'
import { DOClass, instrumentDOClass } from './instrumentation/do.js'
type FetchHandler = ExportedHandlerFetchHandler<unknown, unknown>
type QueueHandler = ExportedHandlerQueueHandler
export type ResolveConfigFn = (env: any, trigger: Trigger) => TraceConfig
export type ConfigurationOption = TraceConfig | ResolveConfigFn
export function isRequest(trigger: Trigger): trigger is Request {
return trigger instanceof Request
}
export function isMessageBatch(trigger: Trigger): trigger is MessageBatch {
return !!(trigger as MessageBatch).ackAll
}
export function isAlarm(trigger: Trigger): trigger is 'do-alarm' {
return trigger === 'do-alarm'
}
const createResource = (config: ResolvedTraceConfig): Resource => {
const workerResourceAttrs = {
[SemanticResourceAttributes.CLOUD_PROVIDER]: 'cloudflare',
[SemanticResourceAttributes.CLOUD_PLATFORM]: 'cloudflare.workers',
[SemanticResourceAttributes.CLOUD_REGION]: 'earth',
// [SemanticResourceAttributes.FAAS_NAME]: '//TODO',
// [SemanticResourceAttributes.FAAS_VERSION]: '//TODO',
[SemanticResourceAttributes.FAAS_MAX_MEMORY]: 128,
[SemanticResourceAttributes.TELEMETRY_SDK_LANGUAGE]: 'JavaScript',
[SemanticResourceAttributes.TELEMETRY_SDK_NAME]: '@microlabs/otel-workers-sdk',
}
const serviceResource = new Resource({
[SemanticResourceAttributes.SERVICE_NAME]: config.service.name,
[SemanticResourceAttributes.SERVICE_NAMESPACE]: config.service.namespace,
[SemanticResourceAttributes.SERVICE_VERSION]: config.service.version,
})
const resource = new Resource(workerResourceAttrs)
return resource.merge(serviceResource)
}
function isSpanExporter(exporterConfig: ExporterConfig): exporterConfig is SpanExporter {
return !!(exporterConfig as SpanExporter).export
}
let initialised = false
function init(config: ResolvedTraceConfig): void {
if (!initialised) {
instrumentGlobalCache()
instrumentGlobalFetch()
propagation.setGlobalPropagator(new W3CTraceContextPropagator())
const resource = createResource(config)
const spanProcessor = new BatchTraceSpanProcessor()
const provider = new WorkerTracerProvider(spanProcessor, resource)
provider.register()
initialised = true
}
}
| function isSampler(sampler: Sampler | ParentRatioSamplingConfig): sampler is Sampler { |
return !!(sampler as Sampler).shouldSample
}
function createSampler(conf: ParentRatioSamplingConfig): Sampler {
const ratioSampler = new TraceIdRatioBasedSampler(conf.ratio)
if (typeof conf.acceptRemote === 'boolean' && !conf.acceptRemote) {
return new ParentBasedSampler({
root: ratioSampler,
remoteParentSampled: ratioSampler,
remoteParentNotSampled: ratioSampler,
})
} else {
return new ParentBasedSampler({ root: ratioSampler })
}
}
function parseConfig(supplied: TraceConfig): ResolvedTraceConfig {
const headSampleConf = supplied.sampling?.headSampler
const headSampler = headSampleConf
? isSampler(headSampleConf)
? headSampleConf
: createSampler(headSampleConf)
: new AlwaysOnSampler()
return {
exporter: isSpanExporter(supplied.exporter) ? supplied.exporter : new OTLPExporter(supplied.exporter),
fetch: {
includeTraceContext: supplied.fetch?.includeTraceContext ?? true,
},
handlers: {
fetch: {
acceptTraceContext: supplied.handlers?.fetch?.acceptTraceContext ?? true,
},
},
postProcessor: supplied.postProcessor || ((spans: ReadableSpan[]) => spans),
sampling: {
headSampler,
tailSampler: supplied.sampling?.tailSampler || multiTailSampler([isHeadSampled, isRootErrorSpan]),
},
service: supplied.service,
}
}
function createInitialiser(config: ConfigurationOption): Initialiser {
if (typeof config === 'function') {
return (env, trigger) => {
const conf = parseConfig(config(env, trigger))
init(conf)
return conf
}
} else {
return () => {
const conf = parseConfig(config)
init(conf)
return conf
}
}
}
export function instrument<E, Q, C>(
handler: ExportedHandler<E, Q, C>,
config: ConfigurationOption
): ExportedHandler<E, Q, C> {
const initialiser = createInitialiser(config)
if (handler.fetch) {
const fetcher = unwrap(handler.fetch) as FetchHandler
handler.fetch = createFetchHandler(fetcher, initialiser)
}
if (handler.queue) {
const queuer = unwrap(handler.queue) as QueueHandler
handler.queue = createQueueHandler(queuer, initialiser)
}
return handler
}
export function instrumentDO(doClass: DOClass, config: ConfigurationOption) {
const initialiser = createInitialiser(config)
return instrumentDOClass(doClass, initialiser)
}
export { waitUntilTrace } from './instrumentation/fetch.js'
| src/sdk.ts | evanderkoogh-otel-cf-workers-18fd741 | [
{
"filename": "src/provider.ts",
"retrieved_chunk": " * @param config Configuration object for SDK registration\n */\nexport class WorkerTracerProvider implements TracerProvider {\n\tprivate spanProcessor: SpanProcessor\n\tprivate resource: Resource\n\tprivate tracers: Record<string, Tracer> = {}\n\tconstructor(spanProcessor: SpanProcessor, resource: Resource) {\n\t\tthis.spanProcessor = spanProcessor\n\t\tthis.resource = resource\n\t}",
"score": 29.835767376657067
},
{
"filename": "src/provider.ts",
"retrieved_chunk": "\tgetTracer(name: string, version?: string, options?: TracerOptions): Tracer {\n\t\tconst key = `${name}@${version || ''}:${options?.schemaUrl || ''}`\n\t\tif (!this.tracers[key]) {\n\t\t\tthis.tracers[key] = new WorkerTracer(this.spanProcessor, this.resource)\n\t\t}\n\t\treturn this.tracers[key]\n\t}\n\tregister(): void {\n\t\ttrace.setGlobalTracerProvider(this)\n\t\tcontext.setGlobalContextManager(new AsyncLocalStorageContextManager())",
"score": 26.787882418427046
},
{
"filename": "src/tracer.ts",
"retrieved_chunk": "\t\tthis._spanProcessor = spanProcessor\n\t\tthis.resource = resource\n\t}\n\tget spanProcessor() {\n\t\treturn this._spanProcessor\n\t}\n\taddToResource(extra: Resource) {\n\t\tthis.resource.merge(extra)\n\t}\n\tstartSpan(name: string, options: SpanOptions = {}, context = api_context.active()): Span {",
"score": 20.454980089955633
},
{
"filename": "src/instrumentation/fetch.ts",
"retrieved_chunk": "\t\t\tconst options: SpanOptions = { kind: SpanKind.CLIENT, attributes: attrs }\n\t\t\tconst host = new URL(request.url).host\n\t\t\tconst spanName = typeof attrs?.['name'] === 'string' ? attrs?.['name'] : `fetch: ${host}`\n\t\t\tconst promise = tracer.startActiveSpan(spanName, options, async (span) => {\n\t\t\t\tconst includeTraceContext =\n\t\t\t\t\ttypeof config.includeTraceContext === 'function'\n\t\t\t\t\t\t? config.includeTraceContext(request)\n\t\t\t\t\t\t: config.includeTraceContext\n\t\t\t\tif (includeTraceContext ?? true) {\n\t\t\t\t\tpropagation.inject(api_context.active(), request.headers, {",
"score": 19.044711304257966
},
{
"filename": "src/tracer.ts",
"retrieved_chunk": "import { sanitizeAttributes } from '@opentelemetry/core'\nimport { Resource } from '@opentelemetry/resources'\nimport { SpanProcessor, RandomIdGenerator, ReadableSpan, SamplingDecision } from '@opentelemetry/sdk-trace-base'\nimport { SpanImpl } from './span.js'\nimport { getActiveConfig } from './config.js'\nexport class WorkerTracer implements Tracer {\n\tprivate readonly _spanProcessor: SpanProcessor\n\tprivate readonly resource: Resource\n\tprivate readonly idGenerator: RandomIdGenerator = new RandomIdGenerator()\n\tconstructor(spanProcessor: SpanProcessor, resource: Resource) {",
"score": 18.86513168470985
}
] | typescript | function isSampler(sampler: Sampler | ParentRatioSamplingConfig): sampler is Sampler { |
import { context as api_context, trace, SpanOptions, SpanKind, Exception, SpanStatusCode } from '@opentelemetry/api'
import { SemanticAttributes } from '@opentelemetry/semantic-conventions'
import { passthroughGet, unwrap, wrap } from '../wrap.js'
import {
getParentContextFromHeaders,
gatherIncomingCfAttributes,
gatherRequestAttributes,
gatherResponseAttributes,
instrumentFetcher,
} from './fetch.js'
import { instrumentEnv } from './env.js'
import { Initialiser, setConfig } from '../config.js'
import { exportSpans } from './common.js'
import { instrumentStorage } from './do-storage.js'
import { DOConstructorTrigger } from '../types.js'
type FetchFn = DurableObject['fetch']
type AlarmFn = DurableObject['alarm']
type Env = Record<string, unknown>
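// Wraps a DurableObjectStub so that calls to stub.fetch() are traced with the namespace,
// object id and name attached as attributes.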
function instrumentBindingStub(stub: DurableObjectStub, nsName: string): DurableObjectStub {
const stubHandler: ProxyHandler<typeof stub> = {
get(target, prop) {
if (prop === 'fetch') {
const fetcher = Reflect.get(target, prop)
const attrs = {
name: `durable_object:${nsName}`,
'do.namespace': nsName,
'do.id': target.id.toString(),
'do.id.name': target.id.name,
}
return instrumentFetcher(fetcher, () => ({ includeTraceContext: true }), attrs)
} else {
return passthroughGet(target, prop)
}
},
}
return wrap(stub, stubHandler)
}
function instrumentBindingGet(getFn: DurableObjectNamespace['get'], nsName: string): DurableObjectNamespace['get'] {
const getHandler: ProxyHandler<DurableObjectNamespace['get']> = {
apply(target, thisArg, argArray) {
const stub: DurableObjectStub = Reflect.apply(target, thisArg, argArray)
return instrumentBindingStub(stub, nsName)
},
}
return wrap(getFn, getHandler)
}
export function instrumentDOBinding(ns: DurableObjectNamespace, nsName: string) {
const nsHandler: ProxyHandler<typeof ns> = {
get(target, prop) {
if (prop === 'get') {
const fn = Reflect.get(ns, prop)
return instrumentBindingGet(fn, nsName)
} else {
return passthroughGet(target, prop)
}
},
}
return wrap(ns, nsHandler)
}
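// Wraps DurableObjectState: storage access is instrumented, and other methods are bound to the
// original state object so they keep working when called through the proxy.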
export function instrumentState(state: DurableObjectState) {
const stateHandler: ProxyHandler<DurableObjectState> = {
get(target, prop, receiver) {
const result = Reflect.get(target, prop, unwrap(receiver))
if (prop === 'storage') {
return instrumentStorage(result)
} else if (typeof result === 'function') {
return result.bind(target)
} else {
return result
}
},
}
return wrap(state, stateHandler)
}
let cold_start = true
export type DOClass = { new (state: DurableObjectState, env: any): DurableObject }
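// Executes the Durable Object's fetch() inside a server span, continuing any trace context
// found in the incoming request headers.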
export function executeDOFetch(fetchFn: FetchFn, request: Request, id: DurableObjectId): Promise<Response> {
const spanContext = getParentContextFromHeaders(request.headers)
const tracer = trace.getTracer('DO fetchHandler')
const attributes = {
[SemanticAttributes.FAAS_TRIGGER]: 'http',
[SemanticAttributes.FAAS_COLDSTART]: cold_start,
}
cold_start = false
Object.assign(attributes, gatherRequestAttributes(request))
Object.assign(attributes, gatherIncomingCfAttributes(request))
const options: SpanOptions = {
attributes,
kind: SpanKind.SERVER,
}
const name = id.name || ''
const promise = tracer.startActiveSpan(`do.fetchHandler:${name}`, options, spanContext, async (span) => {
try {
const response: Response = await fetchFn(request)
if (response.ok) {
span.setStatus({ code: SpanStatusCode.OK })
}
span.setAttributes(gatherResponseAttributes(response))
span.end()
return response
} catch (error) {
span.recordException(error as Exception)
span.setStatus({ code: SpanStatusCode.ERROR })
span.end()
throw error
}
})
return promise
}
export function executeDOAlarm(alarmFn: NonNullable<AlarmFn>, id: DurableObjectId): Promise<void> {
const tracer = trace.getTracer('DO alarmHandler')
const name = id.name || ''
const promise = tracer.startActiveSpan(`do.alarmHandler:${name}`, async (span) => {
span.setAttribute(SemanticAttributes.FAAS_COLDSTART, cold_start)
cold_start = false
span.setAttribute('do.id', id.toString())
if (id.name) span.setAttribute('do.name', id.name)
try {
await alarmFn()
span.end()
} catch (error) {
span.recordException(error as Exception)
span.setStatus({ code: SpanStatusCode.ERROR })
span.end()
throw error
}
})
return promise
}
function instrumentFetchFn(fetchFn: FetchFn, initialiser: Initialiser, env: Env, id: DurableObjectId): FetchFn {
const fetchHandler: ProxyHandler<FetchFn> = {
async apply(target, thisArg, argArray: Parameters<FetchFn>) {
const request = argArray[0]
const config = initialiser(env, request)
const context = setConfig(config)
try {
const bound = target.bind(unwrap(thisArg))
return await api_context.with(context, executeDOFetch, undefined, bound, request, id)
} catch (error) {
throw error
} finally {
exportSpans()
}
},
}
return wrap(fetchFn, fetchHandler)
}
function instrumentAlarmFn(alarmFn: AlarmFn, initialiser: Initialiser, env: Env, id: DurableObjectId) {
if (!alarmFn) return undefined
const alarmHandler: ProxyHandler<NonNullable<AlarmFn>> = {
async apply(target, thisArg) {
const config = initialiser(env, 'do-alarm')
const context = setConfig(config)
try {
const bound = target.bind(unwrap(thisArg))
return await api_context.with(context, executeDOAlarm, undefined, bound, id)
} catch (error) {
throw error
} finally {
exportSpans()
}
},
}
return wrap(alarmFn, alarmHandler)
}
function instrumentDurableObject(doObj: DurableObject, initialiser: Initialiser, env: Env, state: DurableObjectState) {
const objHandler: ProxyHandler<DurableObject> = {
get(target, prop) {
if (prop === 'fetch') {
const fetchFn = Reflect.get(target, prop)
return instrumentFetchFn(fetchFn, initialiser, env, state.id)
} else if (prop === 'alarm') {
const alarmFn = Reflect.get(target, prop)
return instrumentAlarmFn(alarmFn, initialiser, env, state.id)
} else {
const result = Reflect.get(target, prop)
if (typeof result === 'function') {
					return result.bind(doObj)
}
return result
}
},
}
return wrap(doObj, objHandler)
}
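// Wraps the Durable Object class itself: the constructor resolves config and instruments state/env,
// and the resulting instance gets traced fetch/alarm handlers.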
export function instrumentDOClass(doClass: DOClass, initialiser: Initialiser): DOClass {
const classHandler: ProxyHandler<DOClass> = {
construct(target, [orig_state, orig_env]: ConstructorParameters<DOClass>) {
const trigger: DOConstructorTrigger = {
id: orig_state.id.toString(),
name: orig_state.id.name,
}
const constructorConfig = initialiser(orig_env, trigger)
const context = setConfig(constructorConfig)
const state = instrumentState(orig_state)
| const env = instrumentEnv(orig_env)
const createDO = () => { |
return new target(state, env)
}
const doObj = api_context.with(context, createDO)
return instrumentDurableObject(doObj, initialiser, env, state)
},
}
return wrap(doClass, classHandler)
}
| src/instrumentation/do.ts | evanderkoogh-otel-cf-workers-18fd741 | [
{
"filename": "src/instrumentation/queue.ts",
"retrieved_chunk": "\t\t\tconst [batch, orig_env, orig_ctx] = argArray\n\t\t\tconst config = initialiser(orig_env as Record<string, unknown>, batch)\n\t\t\tconst env = instrumentEnv(orig_env as Record<string, unknown>)\n\t\t\tconst { ctx, tracker } = proxyExecutionContext(orig_ctx)\n\t\t\tconst context = setConfig(config)\n\t\t\ttry {\n\t\t\t\tconst args: QueueHandlerArgs = [batch, env, ctx]\n\t\t\t\treturn await api_context.with(context, executeQueueHandler, undefined, target, args)\n\t\t\t} catch (error) {\n\t\t\t\tthrow error",
"score": 44.85435311432225
},
{
"filename": "src/instrumentation/fetch.ts",
"retrieved_chunk": "\t\tapply: async (target, _thisArg, argArray: Parameters<FetchHandler>): Promise<Response> => {\n\t\t\tconst [request, orig_env, orig_ctx] = argArray\n\t\t\tconst config = initialiser(orig_env as Record<string, unknown>, request)\n\t\t\tconst env = instrumentEnv(orig_env as Record<string, unknown>)\n\t\t\tconst { ctx, tracker } = proxyExecutionContext(orig_ctx)\n\t\t\tconst context = setConfig(config)\n\t\t\ttry {\n\t\t\t\tconst args: FetchHandlerArgs = [request, env, ctx]\n\t\t\t\treturn await api_context.with(context, executeFetchHandler, undefined, target, args)\n\t\t\t} catch (error) {",
"score": 43.28618257041181
},
{
"filename": "src/instrumentation/queue.ts",
"retrieved_chunk": "\t\t\t'queue.messages_success': this.succeeded,\n\t\t\t'queue.messages_failed': this.failed,\n\t\t\t'queue.batch_success': this.succeeded === this.total,\n\t\t}\n\t}\n}\nconst addEvent = (name: string, msg?: Message) => {\n\tconst attrs: Attributes = {}\n\tif (msg) {\n\t\tattrs['queue.message_id'] = msg.id",
"score": 25.92944917522607
},
{
"filename": "src/sdk.ts",
"retrieved_chunk": "}\nfunction createInitialiser(config: ConfigurationOption): Initialiser {\n\tif (typeof config === 'function') {\n\t\treturn (env, trigger) => {\n\t\t\tconst conf = parseConfig(config(env, trigger))\n\t\t\tinit(conf)\n\t\t\treturn conf\n\t\t}\n\t} else {\n\t\treturn () => {",
"score": 22.62338406430813
},
{
"filename": "src/sdk.ts",
"retrieved_chunk": "export function isMessageBatch(trigger: Trigger): trigger is MessageBatch {\n\treturn !!(trigger as MessageBatch).ackAll\n}\nexport function isAlarm(trigger: Trigger): trigger is 'do-alarm' {\n\treturn trigger === 'do-alarm'\n}\nconst createResource = (config: ResolvedTraceConfig): Resource => {\n\tconst workerResourceAttrs = {\n\t\t[SemanticResourceAttributes.CLOUD_PROVIDER]: 'cloudflare',\n\t\t[SemanticResourceAttributes.CLOUD_PLATFORM]: 'cloudflare.workers',",
"score": 22.578518344630766
}
] | typescript | const env = instrumentEnv(orig_env)
const createDO = () => { |
import { propagation } from '@opentelemetry/api'
import { W3CTraceContextPropagator } from '@opentelemetry/core'
import { Resource } from '@opentelemetry/resources'
import { SemanticResourceAttributes } from '@opentelemetry/semantic-conventions'
import {
AlwaysOnSampler,
ParentBasedSampler,
ReadableSpan,
Sampler,
SpanExporter,
TraceIdRatioBasedSampler,
} from '@opentelemetry/sdk-trace-base'
import { Initialiser } from './config.js'
import { OTLPExporter } from './exporter.js'
import { WorkerTracerProvider } from './provider.js'
import { isHeadSampled, isRootErrorSpan, multiTailSampler } from './sampling.js'
import { BatchTraceSpanProcessor } from './spanprocessor.js'
import { Trigger, TraceConfig, ResolvedTraceConfig, ExporterConfig, ParentRatioSamplingConfig } from './types.js'
import { unwrap } from './wrap.js'
import { createFetchHandler, instrumentGlobalFetch } from './instrumentation/fetch.js'
import { instrumentGlobalCache } from './instrumentation/cache.js'
import { createQueueHandler } from './instrumentation/queue.js'
import { DOClass, instrumentDOClass } from './instrumentation/do.js'
type FetchHandler = ExportedHandlerFetchHandler<unknown, unknown>
type QueueHandler = ExportedHandlerQueueHandler
export type ResolveConfigFn = (env: any, trigger: Trigger) => TraceConfig
export type ConfigurationOption = TraceConfig | ResolveConfigFn
export function isRequest(trigger: Trigger): trigger is Request {
return trigger instanceof Request
}
export function isMessageBatch(trigger: Trigger): trigger is MessageBatch {
return !!(trigger as MessageBatch).ackAll
}
export function isAlarm(trigger: Trigger): trigger is 'do-alarm' {
return trigger === 'do-alarm'
}
const createResource = (config: ResolvedTraceConfig): Resource => {
const workerResourceAttrs = {
[SemanticResourceAttributes.CLOUD_PROVIDER]: 'cloudflare',
[SemanticResourceAttributes.CLOUD_PLATFORM]: 'cloudflare.workers',
[SemanticResourceAttributes.CLOUD_REGION]: 'earth',
// [SemanticResourceAttributes.FAAS_NAME]: '//TODO',
// [SemanticResourceAttributes.FAAS_VERSION]: '//TODO',
[SemanticResourceAttributes.FAAS_MAX_MEMORY]: 128,
[SemanticResourceAttributes.TELEMETRY_SDK_LANGUAGE]: 'JavaScript',
[SemanticResourceAttributes.TELEMETRY_SDK_NAME]: '@microlabs/otel-workers-sdk',
}
const serviceResource = new Resource({
[SemanticResourceAttributes.SERVICE_NAME]: config.service.name,
[SemanticResourceAttributes.SERVICE_NAMESPACE]: config.service.namespace,
[SemanticResourceAttributes.SERVICE_VERSION]: config.service.version,
})
const resource = new Resource(workerResourceAttrs)
return resource.merge(serviceResource)
}
function | isSpanExporter(exporterConfig: ExporterConfig): exporterConfig is SpanExporter { |
return !!(exporterConfig as SpanExporter).export
}
let initialised = false
function init(config: ResolvedTraceConfig): void {
if (!initialised) {
instrumentGlobalCache()
instrumentGlobalFetch()
propagation.setGlobalPropagator(new W3CTraceContextPropagator())
const resource = createResource(config)
const spanProcessor = new BatchTraceSpanProcessor()
const provider = new WorkerTracerProvider(spanProcessor, resource)
provider.register()
initialised = true
}
}
function isSampler(sampler: Sampler | ParentRatioSamplingConfig): sampler is Sampler {
return !!(sampler as Sampler).shouldSample
}
function createSampler(conf: ParentRatioSamplingConfig): Sampler {
const ratioSampler = new TraceIdRatioBasedSampler(conf.ratio)
if (typeof conf.acceptRemote === 'boolean' && !conf.acceptRemote) {
return new ParentBasedSampler({
root: ratioSampler,
remoteParentSampled: ratioSampler,
remoteParentNotSampled: ratioSampler,
})
} else {
return new ParentBasedSampler({ root: ratioSampler })
}
}
function parseConfig(supplied: TraceConfig): ResolvedTraceConfig {
const headSampleConf = supplied.sampling?.headSampler
const headSampler = headSampleConf
? isSampler(headSampleConf)
? headSampleConf
: createSampler(headSampleConf)
: new AlwaysOnSampler()
return {
exporter: isSpanExporter(supplied.exporter) ? supplied.exporter : new OTLPExporter(supplied.exporter),
fetch: {
includeTraceContext: supplied.fetch?.includeTraceContext ?? true,
},
handlers: {
fetch: {
acceptTraceContext: supplied.handlers?.fetch?.acceptTraceContext ?? true,
},
},
postProcessor: supplied.postProcessor || ((spans: ReadableSpan[]) => spans),
sampling: {
headSampler,
tailSampler: supplied.sampling?.tailSampler || multiTailSampler([isHeadSampled, isRootErrorSpan]),
},
service: supplied.service,
}
}
function createInitialiser(config: ConfigurationOption): Initialiser {
if (typeof config === 'function') {
return (env, trigger) => {
const conf = parseConfig(config(env, trigger))
init(conf)
return conf
}
} else {
return () => {
const conf = parseConfig(config)
init(conf)
return conf
}
}
}
export function instrument<E, Q, C>(
handler: ExportedHandler<E, Q, C>,
config: ConfigurationOption
): ExportedHandler<E, Q, C> {
const initialiser = createInitialiser(config)
if (handler.fetch) {
const fetcher = unwrap(handler.fetch) as FetchHandler
handler.fetch = createFetchHandler(fetcher, initialiser)
}
if (handler.queue) {
const queuer = unwrap(handler.queue) as QueueHandler
handler.queue = createQueueHandler(queuer, initialiser)
}
return handler
}
export function instrumentDO(doClass: DOClass, config: ConfigurationOption) {
const initialiser = createInitialiser(config)
return instrumentDOClass(doClass, initialiser)
}
export { waitUntilTrace } from './instrumentation/fetch.js'
| src/sdk.ts | evanderkoogh-otel-cf-workers-18fd741 | [
{
"filename": "src/provider.ts",
"retrieved_chunk": " * @param config Configuration object for SDK registration\n */\nexport class WorkerTracerProvider implements TracerProvider {\n\tprivate spanProcessor: SpanProcessor\n\tprivate resource: Resource\n\tprivate tracers: Record<string, Tracer> = {}\n\tconstructor(spanProcessor: SpanProcessor, resource: Resource) {\n\t\tthis.spanProcessor = spanProcessor\n\t\tthis.resource = resource\n\t}",
"score": 28.729671490747094
},
{
"filename": "src/tracer.ts",
"retrieved_chunk": "\t\tthis._spanProcessor = spanProcessor\n\t\tthis.resource = resource\n\t}\n\tget spanProcessor() {\n\t\treturn this._spanProcessor\n\t}\n\taddToResource(extra: Resource) {\n\t\tthis.resource.merge(extra)\n\t}\n\tstartSpan(name: string, options: SpanOptions = {}, context = api_context.active()): Span {",
"score": 27.003222776488702
},
{
"filename": "src/tracer.ts",
"retrieved_chunk": "import { sanitizeAttributes } from '@opentelemetry/core'\nimport { Resource } from '@opentelemetry/resources'\nimport { SpanProcessor, RandomIdGenerator, ReadableSpan, SamplingDecision } from '@opentelemetry/sdk-trace-base'\nimport { SpanImpl } from './span.js'\nimport { getActiveConfig } from './config.js'\nexport class WorkerTracer implements Tracer {\n\tprivate readonly _spanProcessor: SpanProcessor\n\tprivate readonly resource: Resource\n\tprivate readonly idGenerator: RandomIdGenerator = new RandomIdGenerator()\n\tconstructor(spanProcessor: SpanProcessor, resource: Resource) {",
"score": 26.780559712224132
},
{
"filename": "src/types.ts",
"retrieved_chunk": "\theadSampler?: HS\n\ttailSampler?: TailSampleFn\n}\nexport interface TraceConfig<EC extends ExporterConfig = ExporterConfig> {\n\texporter: EC\n\thandlers?: HandlerConfig\n\tfetch?: FetcherConfig\n\tpostProcessor?: PostProcessorFn\n\tsampling?: SamplingConfig\n\tservice: ServiceConfig",
"score": 23.837527761426735
},
{
"filename": "src/provider.ts",
"retrieved_chunk": "\tgetTracer(name: string, version?: string, options?: TracerOptions): Tracer {\n\t\tconst key = `${name}@${version || ''}:${options?.schemaUrl || ''}`\n\t\tif (!this.tracers[key]) {\n\t\t\tthis.tracers[key] = new WorkerTracer(this.spanProcessor, this.resource)\n\t\t}\n\t\treturn this.tracers[key]\n\t}\n\tregister(): void {\n\t\ttrace.setGlobalTracerProvider(this)\n\t\tcontext.setGlobalContextManager(new AsyncLocalStorageContextManager())",
"score": 20.804511465223285
}
] | typescript | isSpanExporter(exporterConfig: ExporterConfig): exporterConfig is SpanExporter { |
import { Context, Span } from '@opentelemetry/api'
import { ReadableSpan, SpanExporter, SpanProcessor } from '@opentelemetry/sdk-trace-base'
import { ExportResult, ExportResultCode } from '@opentelemetry/core'
import { Action, State, stateMachine } from 'ts-checked-fsm'
import { getActiveConfig } from './config.js'
import { TailSampleFn } from './sampling.js'
import { PostProcessorFn } from './types.js'
type CompletedTrace = {
traceId: string
localRootSpan: ReadableSpan
completedSpans: ReadableSpan[]
}
type InProgressTrace = {
inProgressSpanIds: Set<string>
} & CompletedTrace
type InitialState = State<'not_started'>
type InProgressTraceState = State<'in_progress', InProgressTrace>
type TraceCompleteState = State<'trace_complete', CompletedTrace>
type ExportingState = State<'exporting', { promise: Promise<ExportResult> }>
type DoneState = State<'done'>
type StartExportArguments = {
exporter: SpanExporter
tailSampler: TailSampleFn
postProcessor: PostProcessorFn
}
type StartSpanAction = Action<'startSpan', { span: Span }>
type EndSpanAction = Action<'endSpan', { span: ReadableSpan }>
type StartExportAction = Action<'startExport', { args: StartExportArguments }>
function newTrace(currentState: InitialState, { span }: StartSpanAction): InProgressTraceState {
const spanId = span.spanContext().spanId
return {
...currentState,
stateName: 'in_progress',
traceId: span.spanContext().traceId,
localRootSpan: span as unknown as ReadableSpan,
completedSpans: [] as ReadableSpan[],
inProgressSpanIds: new Set([spanId]),
} as const
}
function newSpan(currentState: InProgressTraceState, { span }: StartSpanAction): InProgressTraceState {
const spanId = span.spanContext().spanId
currentState.inProgressSpanIds.add(spanId)
return { ...currentState }
}
function endSpan(
currentState: InProgressTraceState,
{ span }: EndSpanAction
): InProgressTraceState | TraceCompleteState {
currentState.completedSpans.push(span)
currentState.inProgressSpanIds.delete(span.spanContext().spanId)
if (currentState.inProgressSpanIds.size === 0) {
return {
stateName: 'trace_complete',
traceId: currentState.traceId,
localRootSpan: currentState.localRootSpan,
completedSpans: currentState.completedSpans,
} as const
} else {
return { ...currentState }
}
}
function startExport(currentState: TraceCompleteState, { args }: StartExportAction): ExportingState | DoneState {
const { exporter, tailSampler, postProcessor } = args
const { traceId, localRootSpan, completedSpans: spans } = currentState
const shouldExport = tailSampler({ traceId, localRootSpan, spans })
if (shouldExport) {
const exportSpans = postProcessor(spans)
const promise = new Promise<ExportResult>((resolve) => {
exporter.export(exportSpans, resolve)
})
return { stateName: 'exporting', promise }
} else {
return { stateName: 'done' }
}
}
const { nextState } = stateMachine()
.state('not_started')
.state<'in_progress', InProgressTraceState>('in_progress')
.state<'trace_complete', TraceCompleteState>('trace_complete')
.state<'exporting', ExportingState>('exporting')
.state('done')
.transition('not_started', 'in_progress')
.transition('in_progress', 'in_progress')
.transition('in_progress', 'trace_complete')
.transition('trace_complete', 'exporting')
.transition('trace_complete', 'done')
.transition('exporting', 'done')
.action<'startSpan', StartSpanAction>('startSpan')
.action<'endSpan', EndSpanAction>('endSpan')
.action<'startExport', StartExportAction>('startExport')
.action('exportDone')
.actionHandler('not_started', 'startSpan', newTrace)
.actionHandler('in_progress', 'startSpan', newSpan)
.actionHandler('in_progress', 'endSpan', endSpan)
.actionHandler('trace_complete', 'startExport', startExport)
.actionHandler('exporting', 'exportDone', (_c, _a) => {
return { stateName: 'done' } as const
})
.done()
type AnyTraceState = Parameters<typeof nextState>[0]
type AnyTraceAction = Parameters<typeof nextState>[1]
export class BatchTraceSpanProcessor implements SpanProcessor {
private traces: Map<string, AnyTraceState> = new Map()
private inprogressExports: Map<string, Promise<ExportResult>> = new Map()
private action(traceId: string, action: AnyTraceAction): AnyTraceState {
const state = this.traces.get(traceId) || { stateName: 'not_started' }
const newState = nextState(state, action)
if (newState.stateName === 'done') {
this.traces.delete(traceId)
} else {
this.traces.set(traceId, newState)
}
return newState
}
private export(traceId: string) {
const { exporter | , sampling, postProcessor } = getActiveConfig()
const exportArgs = { exporter, tailSampler: sampling.tailSampler, postProcessor } |
const newState = this.action(traceId, { actionName: 'startExport', args: exportArgs })
if (newState.stateName === 'exporting') {
const promise = newState.promise
this.inprogressExports.set(traceId, promise)
promise.then((result) => {
if (result.code === ExportResultCode.FAILED) {
console.log('Error sending spans to exporter:', result.error)
}
this.action(traceId, { actionName: 'exportDone' })
this.inprogressExports.delete(traceId)
})
}
}
onStart(span: Span, _parentContext: Context): void {
const traceId = span.spanContext().traceId
this.action(traceId, { actionName: 'startSpan', span })
}
onEnd(span: ReadableSpan): void {
const traceId = span.spanContext().traceId
const state = this.action(traceId, { actionName: 'endSpan', span })
if (state.stateName === 'trace_complete') {
this.export(traceId)
}
}
async forceFlush(): Promise<void> {
await Promise.allSettled(this.inprogressExports.values())
}
async shutdown(): Promise<void> {}
}
| src/spanprocessor.ts | evanderkoogh-otel-cf-workers-18fd741 | [
{
"filename": "src/sdk.ts",
"retrieved_chunk": "\t\t\t\tacceptTraceContext: supplied.handlers?.fetch?.acceptTraceContext ?? true,\n\t\t\t},\n\t\t},\n\t\tpostProcessor: supplied.postProcessor || ((spans: ReadableSpan[]) => spans),\n\t\tsampling: {\n\t\t\theadSampler,\n\t\t\ttailSampler: supplied.sampling?.tailSampler || multiTailSampler([isHeadSampled, isRootErrorSpan]),\n\t\t},\n\t\tservice: supplied.service,\n\t}",
"score": 35.7016435445666
},
{
"filename": "src/types.ts",
"retrieved_chunk": "\theadSampler?: HS\n\ttailSampler?: TailSampleFn\n}\nexport interface TraceConfig<EC extends ExporterConfig = ExporterConfig> {\n\texporter: EC\n\thandlers?: HandlerConfig\n\tfetch?: FetcherConfig\n\tpostProcessor?: PostProcessorFn\n\tsampling?: SamplingConfig\n\tservice: ServiceConfig",
"score": 35.353997540638716
},
{
"filename": "src/tracer.ts",
"retrieved_chunk": "\t\tif (options.root) {\n\t\t\tcontext = trace.deleteSpan(context)\n\t\t}\n\t\tconst parentSpan = trace.getSpan(context)\n\t\tconst parentSpanContext = parentSpan?.spanContext()\n\t\tconst hasParentContext = parentSpanContext && trace.isSpanContextValid(parentSpanContext)\n\t\tconst traceId = hasParentContext ? parentSpanContext.traceId : this.idGenerator.generateTraceId()\n\t\tconst spanKind = options.kind || SpanKind.INTERNAL\n\t\tconst sanitisedAttrs = sanitizeAttributes(options.attributes)\n\t\tconst sampler = getActiveConfig().sampling.headSampler",
"score": 27.537974726886507
},
{
"filename": "src/types.ts",
"retrieved_chunk": "}\nexport interface ResolvedTraceConfig extends TraceConfig {\n\texporter: SpanExporter\n\thandlers: Required<HandlerConfig>\n\tfetch: Required<FetcherConfig>\n\tpostProcessor: PostProcessorFn\n\tsampling: Required<SamplingConfig<Sampler>>\n}\nexport interface DOConstructorTrigger {\n\tid: string",
"score": 26.4569013793885
},
{
"filename": "src/instrumentation/queue.ts",
"retrieved_chunk": "\tconst tracer = trace.getTracer('queueHandler')\n\tconst options: SpanOptions = {\n\t\tattributes: {\n\t\t\t'queue.name': batch.queue,\n\t\t},\n\t\tkind: SpanKind.CONSUMER,\n\t}\n\tconst promise = tracer.startActiveSpan(`queueHandler:${batch.queue}`, options, async (span) => {\n\t\tconst traceId = span.spanContext().traceId\n\t\tapi_context.active().setValue(traceIdSymbol, traceId)",
"score": 20.911252826238794
}
] | typescript | , sampling, postProcessor } = getActiveConfig()
const exportArgs = { exporter, tailSampler: sampling.tailSampler, postProcessor } |
import {
Tracer,
TraceFlags,
Span,
SpanKind,
SpanOptions,
Context,
context as api_context,
trace,
} from '@opentelemetry/api'
import { sanitizeAttributes } from '@opentelemetry/core'
import { Resource } from '@opentelemetry/resources'
import { SpanProcessor, RandomIdGenerator, ReadableSpan, SamplingDecision } from '@opentelemetry/sdk-trace-base'
import { SpanImpl } from './span.js'
import { getActiveConfig } from './config.js'
export class WorkerTracer implements Tracer {
private readonly _spanProcessor: SpanProcessor
private readonly resource: Resource
private readonly idGenerator: RandomIdGenerator = new RandomIdGenerator()
constructor(spanProcessor: SpanProcessor, resource: Resource) {
this._spanProcessor = spanProcessor
this.resource = resource
}
get spanProcessor() {
return this._spanProcessor
}
addToResource(extra: Resource) {
this.resource.merge(extra)
}
startSpan(name: string, options: SpanOptions = {}, context = api_context.active()): Span {
if (options.root) {
context = trace.deleteSpan(context)
}
const parentSpan = trace.getSpan(context)
const parentSpanContext = parentSpan?.spanContext()
const hasParentContext = parentSpanContext && trace.isSpanContextValid(parentSpanContext)
const traceId = hasParentContext ? parentSpanContext.traceId : this.idGenerator.generateTraceId()
const spanKind = options.kind || SpanKind.INTERNAL
const sanitisedAttrs = sanitizeAttributes(options.attributes)
const sampler = getActiveConfig().sampling.headSampler
const samplingDecision = sampler.shouldSample(context, traceId, name, spanKind, sanitisedAttrs, [])
const { decision, traceState, attributes: attrs } = samplingDecision
const attributes = Object.assign({}, sanitisedAttrs, attrs)
const spanId = this.idGenerator.generateSpanId()
const parentSpanId = hasParentContext ? parentSpanContext.spanId : undefined
const traceFlags = decision === SamplingDecision.RECORD_AND_SAMPLED ? TraceFlags.SAMPLED : TraceFlags.NONE
const spanContext = { traceId, spanId, traceFlags, traceState }
const span = | new SpanImpl({ |
attributes,
name,
onEnd: (span) => {
this.spanProcessor.onEnd(span as unknown as ReadableSpan)
},
resource: this.resource,
spanContext,
parentSpanId,
spanKind,
startTime: options.startTime,
})
//Do not get me started on the idiosyncrasies of the Otel JS libraries.
//@ts-ignore
this.spanProcessor.onStart(span, context)
return span
}
startActiveSpan<F extends (span: Span) => ReturnType<F>>(name: string, fn: F): ReturnType<F>
startActiveSpan<F extends (span: Span) => ReturnType<F>>(name: string, options: SpanOptions, fn: F): ReturnType<F>
startActiveSpan<F extends (span: Span) => ReturnType<F>>(
name: string,
options: SpanOptions,
context: Context,
fn: F
): ReturnType<F>
startActiveSpan<F extends (span: Span) => ReturnType<F>>(name: string, ...args: unknown[]): ReturnType<F> {
const options = args.length > 1 ? (args[0] as SpanOptions) : undefined
const parentContext = args.length > 2 ? (args[1] as Context) : api_context.active()
const fn = args[args.length - 1] as F
const span = this.startSpan(name, options, parentContext)
const contextWithSpanSet = trace.setSpan(parentContext, span)
return api_context.with(contextWithSpanSet, fn, undefined, span)
}
}
| src/tracer.ts | evanderkoogh-otel-cf-workers-18fd741 | [
{
"filename": "src/sampling.ts",
"retrieved_chunk": "\t\treturn samplers.reduce((result, sampler) => result || sampler(traceInfo), false)\n\t}\n}\nexport const isHeadSampled: TailSampleFn = (traceInfo) => {\n\tconst localRootSpan = traceInfo.localRootSpan as unknown as ReadableSpan\n\treturn localRootSpan.spanContext().traceFlags === TraceFlags.SAMPLED\n}\nexport const isRootErrorSpan: TailSampleFn = (traceInfo) => {\n\tconst localRootSpan = traceInfo.localRootSpan as unknown as ReadableSpan\n\treturn localRootSpan.status.code === SpanStatusCode.ERROR",
"score": 48.498889822860086
},
{
"filename": "src/spanprocessor.ts",
"retrieved_chunk": "\tconst spanId = span.spanContext().spanId\n\tcurrentState.inProgressSpanIds.add(spanId)\n\treturn { ...currentState }\n}\nfunction endSpan(\n\tcurrentState: InProgressTraceState,\n\t{ span }: EndSpanAction\n): InProgressTraceState | TraceCompleteState {\n\tcurrentState.completedSpans.push(span)\n\tcurrentState.inProgressSpanIds.delete(span.spanContext().spanId)",
"score": 39.632658845565544
},
{
"filename": "src/spanprocessor.ts",
"retrieved_chunk": "\treturn {\n\t\t...currentState,\n\t\tstateName: 'in_progress',\n\t\ttraceId: span.spanContext().traceId,\n\t\tlocalRootSpan: span as unknown as ReadableSpan,\n\t\tcompletedSpans: [] as ReadableSpan[],\n\t\tinProgressSpanIds: new Set([spanId]),\n\t} as const\n}\nfunction newSpan(currentState: InProgressTraceState, { span }: StartSpanAction): InProgressTraceState {",
"score": 38.31692811670561
},
{
"filename": "src/instrumentation/queue.ts",
"retrieved_chunk": "\tconst tracer = trace.getTracer('queueHandler')\n\tconst options: SpanOptions = {\n\t\tattributes: {\n\t\t\t'queue.name': batch.queue,\n\t\t},\n\t\tkind: SpanKind.CONSUMER,\n\t}\n\tconst promise = tracer.startActiveSpan(`queueHandler:${batch.queue}`, options, async (span) => {\n\t\tconst traceId = span.spanContext().traceId\n\t\tapi_context.active().setValue(traceIdSymbol, traceId)",
"score": 36.34748204624553
},
{
"filename": "src/spanprocessor.ts",
"retrieved_chunk": "\tonStart(span: Span, _parentContext: Context): void {\n\t\tconst traceId = span.spanContext().traceId\n\t\tthis.action(traceId, { actionName: 'startSpan', span })\n\t}\n\tonEnd(span: ReadableSpan): void {\n\t\tconst traceId = span.spanContext().traceId\n\t\tconst state = this.action(traceId, { actionName: 'endSpan', span })\n\t\tif (state.stateName === 'trace_complete') {\n\t\t\tthis.export(traceId)\n\t\t}",
"score": 34.57121346645416
}
] | typescript | new SpanImpl({ |
import { propagation } from '@opentelemetry/api'
import { W3CTraceContextPropagator } from '@opentelemetry/core'
import { Resource } from '@opentelemetry/resources'
import { SemanticResourceAttributes } from '@opentelemetry/semantic-conventions'
import {
AlwaysOnSampler,
ParentBasedSampler,
ReadableSpan,
Sampler,
SpanExporter,
TraceIdRatioBasedSampler,
} from '@opentelemetry/sdk-trace-base'
import { Initialiser } from './config.js'
import { OTLPExporter } from './exporter.js'
import { WorkerTracerProvider } from './provider.js'
import { isHeadSampled, isRootErrorSpan, multiTailSampler } from './sampling.js'
import { BatchTraceSpanProcessor } from './spanprocessor.js'
import { Trigger, TraceConfig, ResolvedTraceConfig, ExporterConfig, ParentRatioSamplingConfig } from './types.js'
import { unwrap } from './wrap.js'
import { createFetchHandler, instrumentGlobalFetch } from './instrumentation/fetch.js'
import { instrumentGlobalCache } from './instrumentation/cache.js'
import { createQueueHandler } from './instrumentation/queue.js'
import { DOClass, instrumentDOClass } from './instrumentation/do.js'
type FetchHandler = ExportedHandlerFetchHandler<unknown, unknown>
type QueueHandler = ExportedHandlerQueueHandler
export type ResolveConfigFn = (env: any, trigger: Trigger) => TraceConfig
export type ConfigurationOption = TraceConfig | ResolveConfigFn
export function isRequest(trigger: Trigger): trigger is Request {
return trigger instanceof Request
}
export function isMessageBatch(trigger: Trigger): trigger is MessageBatch {
return !!(trigger as MessageBatch).ackAll
}
export function isAlarm(trigger: Trigger): trigger is 'do-alarm' {
return trigger === 'do-alarm'
}
const createResource = (config: ResolvedTraceConfig): Resource => {
const workerResourceAttrs = {
[SemanticResourceAttributes.CLOUD_PROVIDER]: 'cloudflare',
[SemanticResourceAttributes.CLOUD_PLATFORM]: 'cloudflare.workers',
[SemanticResourceAttributes.CLOUD_REGION]: 'earth',
// [SemanticResourceAttributes.FAAS_NAME]: '//TODO',
// [SemanticResourceAttributes.FAAS_VERSION]: '//TODO',
[SemanticResourceAttributes.FAAS_MAX_MEMORY]: 128,
[SemanticResourceAttributes.TELEMETRY_SDK_LANGUAGE]: 'JavaScript',
[SemanticResourceAttributes.TELEMETRY_SDK_NAME]: '@microlabs/otel-workers-sdk',
}
const serviceResource = new Resource({
[SemanticResourceAttributes.SERVICE_NAME]: config.service.name,
[SemanticResourceAttributes.SERVICE_NAMESPACE]: config.service.namespace,
[SemanticResourceAttributes.SERVICE_VERSION]: config.service.version,
})
const resource = new Resource(workerResourceAttrs)
return resource.merge(serviceResource)
}
function isSpanExporter(exporterConfig: ExporterConfig): exporterConfig is SpanExporter {
return !!(exporterConfig as SpanExporter).export
}
let initialised = false
function init(config: ResolvedTraceConfig): void {
if (!initialised) {
instrumentGlobalCache()
instrumentGlobalFetch()
propagation.setGlobalPropagator(new W3CTraceContextPropagator())
const resource = createResource(config)
const spanProcessor = new BatchTraceSpanProcessor()
const provider = new WorkerTracerProvider(spanProcessor, resource)
provider.register()
initialised = true
}
}
function isSampler(sampler: Sampler | ParentRatioSamplingConfig): sampler is Sampler {
return !!(sampler as Sampler).shouldSample
}
function createSampler(conf: ParentRatioSamplingConfig): Sampler {
const ratioSampler = new TraceIdRatioBasedSampler(conf.ratio)
if (typeof conf.acceptRemote === 'boolean' && !conf.acceptRemote) {
return new ParentBasedSampler({
root: ratioSampler,
remoteParentSampled: ratioSampler,
remoteParentNotSampled: ratioSampler,
})
} else {
return new ParentBasedSampler({ root: ratioSampler })
}
}
function parseConfig(supplied: TraceConfig): ResolvedTraceConfig {
const headSampleConf = supplied.sampling?.headSampler
const headSampler = headSampleConf
? isSampler(headSampleConf)
? headSampleConf
: createSampler(headSampleConf)
: new AlwaysOnSampler()
return {
exporter: isSpanExporter(supplied.exporter) ? supplied.exporter : new OTLPExporter(supplied.exporter),
fetch: {
includeTraceContext: supplied.fetch?.includeTraceContext ?? true,
},
handlers: {
fetch: {
acceptTraceContext: supplied.handlers?.fetch?.acceptTraceContext ?? true,
},
},
postProcessor: supplied.postProcessor || ((spans: ReadableSpan[]) => spans),
sampling: {
headSampler,
tailSampler: supplied.sampling?.tailSampler || multiTailSampler([isHeadSampled, isRootErrorSpan]),
},
service: supplied.service,
}
}
function createInitialiser(config: ConfigurationOption): Initialiser {
if (typeof config === 'function') {
return (env, trigger) => {
const conf = parseConfig(config(env, trigger))
init(conf)
return conf
}
} else {
return () => {
const conf = parseConfig(config)
init(conf)
return conf
}
}
}
export function instrument<E, Q, C>(
handler: ExportedHandler<E, Q, C>,
config: ConfigurationOption
): ExportedHandler<E, Q, C> {
const initialiser = createInitialiser(config)
if (handler.fetch) {
const fetcher = unwrap(handler.fetch) as FetchHandler
handler.fetch = createFetchHandler(fetcher, initialiser)
}
if (handler.queue) {
const queuer = unwrap(handler.queue) as QueueHandler
| handler.queue = createQueueHandler(queuer, initialiser)
} |
return handler
}
export function instrumentDO(doClass: DOClass, config: ConfigurationOption) {
const initialiser = createInitialiser(config)
return instrumentDOClass(doClass, initialiser)
}
export { waitUntilTrace } from './instrumentation/fetch.js'
| src/sdk.ts | evanderkoogh-otel-cf-workers-18fd741 | [
{
"filename": "src/instrumentation/fetch.ts",
"retrieved_chunk": "\tfetchFn: Fetcher['fetch'],\n\tconfigFn: getFetchConfig,\n\tattrs?: Attributes\n): Fetcher['fetch'] {\n\tconst handler: ProxyHandler<typeof fetch> = {\n\t\tapply: (target, thisArg, argArray): ReturnType<typeof fetch> => {\n\t\t\tconst workerConfig = getActiveConfig()\n\t\t\tconst config = configFn(workerConfig)\n\t\t\tconst request = new Request(argArray[0], argArray[1])\n\t\t\tconst tracer = trace.getTracer('fetcher')",
"score": 41.246118832167085
},
{
"filename": "src/instrumentation/fetch.ts",
"retrieved_chunk": "\t\t\treturn promise\n\t\t},\n\t}\n\treturn wrap(fetchFn, handler, true)\n}\nexport function instrumentGlobalFetch(): void {\n\tglobalThis.fetch = instrumentFetcher(globalThis.fetch, (config) => config.fetch)\n}",
"score": 34.87168065354531
},
{
"filename": "src/wrap.ts",
"retrieved_chunk": "\t}\n\tproxyHandler.apply = (target, thisArg, argArray) => {\n\t\tif (handler.apply) {\n\t\t\treturn handler.apply(unwrap(target), unwrap(thisArg), argArray)\n\t\t}\n\t}\n\treturn new Proxy(item, proxyHandler)\n}\nexport function unwrap<T extends object>(item: T): T {\n\tif (item && isWrapped(item)) {",
"score": 30.294723966001136
},
{
"filename": "src/instrumentation/queue.ts",
"retrieved_chunk": "\t\tapply: (target, thisArg, argArray) => {\n\t\t\treturn tracer.startActiveSpan(`queueSend: ${name}`, async (span) => {\n\t\t\t\tspan.setAttribute('queue.operation', 'send')\n\t\t\t\tawait Reflect.apply(target, unwrap(thisArg), argArray)\n\t\t\t\tspan.end()\n\t\t\t})\n\t\t},\n\t}\n\treturn wrap(fn, handler)\n}",
"score": 29.40162861270464
},
{
"filename": "src/wrap.ts",
"retrieved_chunk": "const unwrapSymbol = Symbol('unwrap')\ntype Wrapped<T> = { [unwrapSymbol]: T } & T\nexport function isWrapped<T>(item: T): item is Wrapped<T> {\n\treturn !!(item as Wrapped<T>)[unwrapSymbol]\n}\nexport function wrap<T extends object>(item: T, handler: ProxyHandler<T>, autoPassthrough: boolean = true): T {\n\tif (isWrapped(item)) {\n\t\treturn item\n\t}\n\tconst proxyHandler = Object.assign({}, handler)",
"score": 28.803540727431134
}
] | typescript | handler.queue = createQueueHandler(queuer, initialiser)
} |
import { propagation } from '@opentelemetry/api'
import { W3CTraceContextPropagator } from '@opentelemetry/core'
import { Resource } from '@opentelemetry/resources'
import { SemanticResourceAttributes } from '@opentelemetry/semantic-conventions'
import {
AlwaysOnSampler,
ParentBasedSampler,
ReadableSpan,
Sampler,
SpanExporter,
TraceIdRatioBasedSampler,
} from '@opentelemetry/sdk-trace-base'
import { Initialiser } from './config.js'
import { OTLPExporter } from './exporter.js'
import { WorkerTracerProvider } from './provider.js'
import { isHeadSampled, isRootErrorSpan, multiTailSampler } from './sampling.js'
import { BatchTraceSpanProcessor } from './spanprocessor.js'
import { Trigger, TraceConfig, ResolvedTraceConfig, ExporterConfig, ParentRatioSamplingConfig } from './types.js'
import { unwrap } from './wrap.js'
import { createFetchHandler, instrumentGlobalFetch } from './instrumentation/fetch.js'
import { instrumentGlobalCache } from './instrumentation/cache.js'
import { createQueueHandler } from './instrumentation/queue.js'
import { DOClass, instrumentDOClass } from './instrumentation/do.js'
type FetchHandler = ExportedHandlerFetchHandler<unknown, unknown>
type QueueHandler = ExportedHandlerQueueHandler
export type ResolveConfigFn = (env: any, trigger: Trigger) => TraceConfig
export type ConfigurationOption = TraceConfig | ResolveConfigFn
export function isRequest(trigger: Trigger): trigger is Request {
return trigger instanceof Request
}
export function isMessageBatch(trigger: Trigger): trigger is MessageBatch {
return !!(trigger as MessageBatch).ackAll
}
export function isAlarm(trigger: Trigger): trigger is 'do-alarm' {
return trigger === 'do-alarm'
}
const createResource = (config: ResolvedTraceConfig): Resource => {
const workerResourceAttrs = {
[SemanticResourceAttributes.CLOUD_PROVIDER]: 'cloudflare',
[SemanticResourceAttributes.CLOUD_PLATFORM]: 'cloudflare.workers',
[SemanticResourceAttributes.CLOUD_REGION]: 'earth',
// [SemanticResourceAttributes.FAAS_NAME]: '//TODO',
// [SemanticResourceAttributes.FAAS_VERSION]: '//TODO',
[SemanticResourceAttributes.FAAS_MAX_MEMORY]: 128,
[SemanticResourceAttributes.TELEMETRY_SDK_LANGUAGE]: 'JavaScript',
[SemanticResourceAttributes.TELEMETRY_SDK_NAME]: '@microlabs/otel-workers-sdk',
}
const serviceResource = new Resource({
[SemanticResourceAttributes.SERVICE_NAME]: config.service.name,
[SemanticResourceAttributes.SERVICE_NAMESPACE]: config.service.namespace,
[SemanticResourceAttributes.SERVICE_VERSION]: config.service.version,
})
const resource = new Resource(workerResourceAttrs)
return resource.merge(serviceResource)
}
function isSpanExporter(exporterConfig: ExporterConfig): exporterConfig is SpanExporter {
return !!(exporterConfig as SpanExporter).export
}
let initialised = false
function init(config: ResolvedTraceConfig): void {
if (!initialised) {
instrumentGlobalCache()
instrumentGlobalFetch()
propagation.setGlobalPropagator(new W3CTraceContextPropagator())
const resource = createResource(config)
const spanProcessor = new BatchTraceSpanProcessor()
const provider = new WorkerTracerProvider(spanProcessor, resource)
provider.register()
initialised = true
}
}
function isSampler(sampler: Sampler | ParentRatioSamplingConfig): sampler is Sampler {
return !!(sampler as Sampler).shouldSample
}
function createSampler(conf: ParentRatioSamplingConfig): Sampler {
const ratioSampler = new TraceIdRatioBasedSampler(conf.ratio)
if (typeof conf.acceptRemote === 'boolean' && !conf.acceptRemote) {
return new ParentBasedSampler({
root: ratioSampler,
remoteParentSampled: ratioSampler,
remoteParentNotSampled: ratioSampler,
})
} else {
return new ParentBasedSampler({ root: ratioSampler })
}
}
function parseConfig(supplied: TraceConfig): ResolvedTraceConfig {
const headSampleConf = supplied.sampling?.headSampler
const headSampler = headSampleConf
? isSampler(headSampleConf)
? headSampleConf
: createSampler(headSampleConf)
: new AlwaysOnSampler()
return {
exporter: isSpanExporter(supplied.exporter) ? supplied.exporter : new OTLPExporter(supplied.exporter),
fetch: {
includeTraceContext: supplied.fetch?.includeTraceContext ?? true,
},
handlers: {
fetch: {
acceptTraceContext: supplied.handlers?.fetch?.acceptTraceContext ?? true,
},
},
postProcessor: supplied.postProcessor || ((spans: ReadableSpan[]) => spans),
sampling: {
headSampler,
tailSampler: supplied.sampling?.tailSampler || multiTailSampler([isHeadSampled, isRootErrorSpan]),
},
service: supplied.service,
}
}
function createInitialiser(config: ConfigurationOption): Initialiser {
if (typeof config === 'function') {
return (env, trigger) => {
const conf = parseConfig(config(env, trigger))
init(conf)
return conf
}
} else {
return () => {
const conf = parseConfig(config)
init(conf)
return conf
}
}
}
export function instrument<E, Q, C>(
handler: ExportedHandler<E, Q, C>,
config: ConfigurationOption
): ExportedHandler<E, Q, C> {
const initialiser = createInitialiser(config)
if (handler.fetch) {
| const fetcher = unwrap(handler.fetch) as FetchHandler
handler.fetch = createFetchHandler(fetcher, initialiser)
} |
if (handler.queue) {
const queuer = unwrap(handler.queue) as QueueHandler
handler.queue = createQueueHandler(queuer, initialiser)
}
return handler
}
export function instrumentDO(doClass: DOClass, config: ConfigurationOption) {
const initialiser = createInitialiser(config)
return instrumentDOClass(doClass, initialiser)
}
export { waitUntilTrace } from './instrumentation/fetch.js'
| src/sdk.ts | evanderkoogh-otel-cf-workers-18fd741 | [
{
"filename": "src/instrumentation/fetch.ts",
"retrieved_chunk": "\tfetchFn: Fetcher['fetch'],\n\tconfigFn: getFetchConfig,\n\tattrs?: Attributes\n): Fetcher['fetch'] {\n\tconst handler: ProxyHandler<typeof fetch> = {\n\t\tapply: (target, thisArg, argArray): ReturnType<typeof fetch> => {\n\t\t\tconst workerConfig = getActiveConfig()\n\t\t\tconst config = configFn(workerConfig)\n\t\t\tconst request = new Request(argArray[0], argArray[1])\n\t\t\tconst tracer = trace.getTracer('fetcher')",
"score": 37.323182266412594
},
{
"filename": "src/instrumentation/fetch.ts",
"retrieved_chunk": "\t\t\treturn promise\n\t\t},\n\t}\n\treturn wrap(fetchFn, handler, true)\n}\nexport function instrumentGlobalFetch(): void {\n\tglobalThis.fetch = instrumentFetcher(globalThis.fetch, (config) => config.fetch)\n}",
"score": 35.12389500586985
},
{
"filename": "src/instrumentation/queue.ts",
"retrieved_chunk": "\t\t\t}\n\t\t},\n\t}\n\treturn wrap(msg, msgHandler)\n}\nconst proxyMessageBatch = <E, Q>(batch: MessageBatch, count: MessageStatusCount) => {\n\tconst batchHandler: ProxyHandler<MessageBatch> = {\n\t\tget: (target, prop) => {\n\t\t\tif (prop === 'messages') {\n\t\t\t\tconst messages = Reflect.get(target, prop)",
"score": 34.12796351636708
},
{
"filename": "src/instrumentation/queue.ts",
"retrieved_chunk": "\t\tattrs['queue.message_timestamp'] = msg.timestamp.toISOString()\n\t}\n\ttrace.getActiveSpan()?.addEvent(name, attrs)\n}\nconst proxyQueueMessage = <Q>(msg: Message<Q>, count: MessageStatusCount): Message<Q> => {\n\tconst msgHandler: ProxyHandler<Message<Q>> = {\n\t\tget: (target, prop) => {\n\t\t\tif (prop === 'ack') {\n\t\t\t\tconst ackFn = Reflect.get(target, prop)\n\t\t\t\treturn new Proxy(ackFn, {",
"score": 26.27163240668391
},
{
"filename": "src/instrumentation/do.ts",
"retrieved_chunk": "\t\tget(target, prop) {\n\t\t\tif (prop === 'fetch') {\n\t\t\t\tconst fetcher = Reflect.get(target, prop)\n\t\t\t\tconst attrs = {\n\t\t\t\t\tname: `durable_object:${nsName}`,\n\t\t\t\t\t'do.namespace': nsName,\n\t\t\t\t\t'do.id': target.id.toString(),\n\t\t\t\t\t'do.id.name': target.id.name,\n\t\t\t\t}\n\t\t\t\treturn instrumentFetcher(fetcher, () => ({ includeTraceContext: true }), attrs)",
"score": 22.662728401393284
}
] | typescript | const fetcher = unwrap(handler.fetch) as FetchHandler
handler.fetch = createFetchHandler(fetcher, initialiser)
} |
import { propagation } from '@opentelemetry/api'
import { W3CTraceContextPropagator } from '@opentelemetry/core'
import { Resource } from '@opentelemetry/resources'
import { SemanticResourceAttributes } from '@opentelemetry/semantic-conventions'
import {
AlwaysOnSampler,
ParentBasedSampler,
ReadableSpan,
Sampler,
SpanExporter,
TraceIdRatioBasedSampler,
} from '@opentelemetry/sdk-trace-base'
import { Initialiser } from './config.js'
import { OTLPExporter } from './exporter.js'
import { WorkerTracerProvider } from './provider.js'
import { isHeadSampled, isRootErrorSpan, multiTailSampler } from './sampling.js'
import { BatchTraceSpanProcessor } from './spanprocessor.js'
import { Trigger, TraceConfig, ResolvedTraceConfig, ExporterConfig, ParentRatioSamplingConfig } from './types.js'
import { unwrap } from './wrap.js'
import { createFetchHandler, instrumentGlobalFetch } from './instrumentation/fetch.js'
import { instrumentGlobalCache } from './instrumentation/cache.js'
import { createQueueHandler } from './instrumentation/queue.js'
import { DOClass, instrumentDOClass } from './instrumentation/do.js'
type FetchHandler = ExportedHandlerFetchHandler<unknown, unknown>
type QueueHandler = ExportedHandlerQueueHandler
export type ResolveConfigFn = (env: any, trigger: Trigger) => TraceConfig
export type ConfigurationOption = TraceConfig | ResolveConfigFn
export function isRequest(trigger: Trigger): trigger is Request {
return trigger instanceof Request
}
export function isMessageBatch(trigger: Trigger): trigger is MessageBatch {
return !!(trigger as MessageBatch).ackAll
}
export function isAlarm(trigger: Trigger): trigger is 'do-alarm' {
return trigger === 'do-alarm'
}
const createResource = (config: ResolvedTraceConfig): Resource => {
const workerResourceAttrs = {
[SemanticResourceAttributes.CLOUD_PROVIDER]: 'cloudflare',
[SemanticResourceAttributes.CLOUD_PLATFORM]: 'cloudflare.workers',
[SemanticResourceAttributes.CLOUD_REGION]: 'earth',
// [SemanticResourceAttributes.FAAS_NAME]: '//TODO',
// [SemanticResourceAttributes.FAAS_VERSION]: '//TODO',
[SemanticResourceAttributes.FAAS_MAX_MEMORY]: 128,
[SemanticResourceAttributes.TELEMETRY_SDK_LANGUAGE]: 'JavaScript',
[SemanticResourceAttributes.TELEMETRY_SDK_NAME]: '@microlabs/otel-workers-sdk',
}
const serviceResource = new Resource({
[SemanticResourceAttributes.SERVICE_NAME]: config.service.name,
[SemanticResourceAttributes.SERVICE_NAMESPACE]: config.service.namespace,
[SemanticResourceAttributes.SERVICE_VERSION]: config.service.version,
})
const resource = new Resource(workerResourceAttrs)
return resource.merge(serviceResource)
}
function isSpanExporter(exporterConfig: ExporterConfig): exporterConfig is SpanExporter {
return !!(exporterConfig as SpanExporter).export
}
let initialised = false
function init(config: ResolvedTraceConfig): void {
if (!initialised) {
instrumentGlobalCache()
instrumentGlobalFetch()
propagation.setGlobalPropagator(new W3CTraceContextPropagator())
const resource = createResource(config)
const spanProcessor = new BatchTraceSpanProcessor()
const provider = new WorkerTracerProvider(spanProcessor, resource)
provider.register()
initialised = true
}
}
function isSampler(sampler: Sampler | ParentRatioSamplingConfig): sampler is Sampler {
return !!(sampler as Sampler).shouldSample
}
function createSampler(conf: ParentRatioSamplingConfig): Sampler {
const ratioSampler = new TraceIdRatioBasedSampler(conf.ratio)
if (typeof conf.acceptRemote === 'boolean' && !conf.acceptRemote) {
return new ParentBasedSampler({
root: ratioSampler,
remoteParentSampled: ratioSampler,
remoteParentNotSampled: ratioSampler,
})
} else {
return new ParentBasedSampler({ root: ratioSampler })
}
}
function parseConfig(supplied: TraceConfig): ResolvedTraceConfig {
const headSampleConf = supplied.sampling?.headSampler
const headSampler = headSampleConf
? isSampler(headSampleConf)
? headSampleConf
: createSampler(headSampleConf)
: new AlwaysOnSampler()
return {
| exporter: isSpanExporter(supplied.exporter) ? supplied.exporter : new OTLPExporter(supplied.exporter),
fetch: { |
includeTraceContext: supplied.fetch?.includeTraceContext ?? true,
},
handlers: {
fetch: {
acceptTraceContext: supplied.handlers?.fetch?.acceptTraceContext ?? true,
},
},
postProcessor: supplied.postProcessor || ((spans: ReadableSpan[]) => spans),
sampling: {
headSampler,
tailSampler: supplied.sampling?.tailSampler || multiTailSampler([isHeadSampled, isRootErrorSpan]),
},
service: supplied.service,
}
}
function createInitialiser(config: ConfigurationOption): Initialiser {
if (typeof config === 'function') {
return (env, trigger) => {
const conf = parseConfig(config(env, trigger))
init(conf)
return conf
}
} else {
return () => {
const conf = parseConfig(config)
init(conf)
return conf
}
}
}
export function instrument<E, Q, C>(
handler: ExportedHandler<E, Q, C>,
config: ConfigurationOption
): ExportedHandler<E, Q, C> {
const initialiser = createInitialiser(config)
if (handler.fetch) {
const fetcher = unwrap(handler.fetch) as FetchHandler
handler.fetch = createFetchHandler(fetcher, initialiser)
}
if (handler.queue) {
const queuer = unwrap(handler.queue) as QueueHandler
handler.queue = createQueueHandler(queuer, initialiser)
}
return handler
}
export function instrumentDO(doClass: DOClass, config: ConfigurationOption) {
const initialiser = createInitialiser(config)
return instrumentDOClass(doClass, initialiser)
}
export { waitUntilTrace } from './instrumentation/fetch.js'
| src/sdk.ts | evanderkoogh-otel-cf-workers-18fd741 | [
{
"filename": "src/types.ts",
"retrieved_chunk": "\theadSampler?: HS\n\ttailSampler?: TailSampleFn\n}\nexport interface TraceConfig<EC extends ExporterConfig = ExporterConfig> {\n\texporter: EC\n\thandlers?: HandlerConfig\n\tfetch?: FetcherConfig\n\tpostProcessor?: PostProcessorFn\n\tsampling?: SamplingConfig\n\tservice: ServiceConfig",
"score": 36.077408943360325
},
{
"filename": "src/types.ts",
"retrieved_chunk": "}\nexport interface ResolvedTraceConfig extends TraceConfig {\n\texporter: SpanExporter\n\thandlers: Required<HandlerConfig>\n\tfetch: Required<FetcherConfig>\n\tpostProcessor: PostProcessorFn\n\tsampling: Required<SamplingConfig<Sampler>>\n}\nexport interface DOConstructorTrigger {\n\tid: string",
"score": 29.865238480746882
},
{
"filename": "src/spanprocessor.ts",
"retrieved_chunk": "\t\t\tthis.traces.set(traceId, newState)\n\t\t}\n\t\treturn newState\n\t}\n\tprivate export(traceId: string) {\n\t\tconst { exporter, sampling, postProcessor } = getActiveConfig()\n\t\tconst exportArgs = { exporter, tailSampler: sampling.tailSampler, postProcessor }\n\t\tconst newState = this.action(traceId, { actionName: 'startExport', args: exportArgs })\n\t\tif (newState.stateName === 'exporting') {\n\t\t\tconst promise = newState.promise",
"score": 25.349888901332505
},
{
"filename": "src/spanprocessor.ts",
"retrieved_chunk": "}\nfunction startExport(currentState: TraceCompleteState, { args }: StartExportAction): ExportingState | DoneState {\n\tconst { exporter, tailSampler, postProcessor } = args\n\tconst { traceId, localRootSpan, completedSpans: spans } = currentState\n\tconst shouldExport = tailSampler({ traceId, localRootSpan, spans })\n\tif (shouldExport) {\n\t\tconst exportSpans = postProcessor(spans)\n\t\tconst promise = new Promise<ExportResult>((resolve) => {\n\t\t\texporter.export(exportSpans, resolve)\n\t\t})",
"score": 24.356962689955452
},
{
"filename": "src/types.ts",
"retrieved_chunk": "import { ReadableSpan, Sampler, SpanExporter } from '@opentelemetry/sdk-trace-base'\nimport { OTLPExporterConfig } from './exporter.js'\nimport { FetchHandlerConfig, FetcherConfig } from './instrumentation/fetch.js'\nimport { TailSampleFn } from './sampling.js'\nexport type PostProcessorFn = (spans: ReadableSpan[]) => ReadableSpan[]\nexport type ExporterConfig = OTLPExporterConfig | SpanExporter\nexport interface HandlerConfig {\n\tfetch?: FetchHandlerConfig\n}\nexport interface ServiceConfig {",
"score": 16.674900581706613
}
] | typescript | exporter: isSpanExporter(supplied.exporter) ? supplied.exporter : new OTLPExporter(supplied.exporter),
fetch: { |
import { propagation } from '@opentelemetry/api'
import { W3CTraceContextPropagator } from '@opentelemetry/core'
import { Resource } from '@opentelemetry/resources'
import { SemanticResourceAttributes } from '@opentelemetry/semantic-conventions'
import {
AlwaysOnSampler,
ParentBasedSampler,
ReadableSpan,
Sampler,
SpanExporter,
TraceIdRatioBasedSampler,
} from '@opentelemetry/sdk-trace-base'
import { Initialiser } from './config.js'
import { OTLPExporter } from './exporter.js'
import { WorkerTracerProvider } from './provider.js'
import { isHeadSampled, isRootErrorSpan, multiTailSampler } from './sampling.js'
import { BatchTraceSpanProcessor } from './spanprocessor.js'
import { Trigger, TraceConfig, ResolvedTraceConfig, ExporterConfig, ParentRatioSamplingConfig } from './types.js'
import { unwrap } from './wrap.js'
import { createFetchHandler, instrumentGlobalFetch } from './instrumentation/fetch.js'
import { instrumentGlobalCache } from './instrumentation/cache.js'
import { createQueueHandler } from './instrumentation/queue.js'
import { DOClass, instrumentDOClass } from './instrumentation/do.js'
type FetchHandler = ExportedHandlerFetchHandler<unknown, unknown>
type QueueHandler = ExportedHandlerQueueHandler
export type ResolveConfigFn = (env: any, trigger: Trigger) => TraceConfig
export type ConfigurationOption = TraceConfig | ResolveConfigFn
export function isRequest(trigger: Trigger): trigger is Request {
return trigger instanceof Request
}
export function isMessageBatch(trigger: Trigger): trigger is MessageBatch {
return !!(trigger as MessageBatch).ackAll
}
export function isAlarm(trigger: Trigger): trigger is 'do-alarm' {
return trigger === 'do-alarm'
}
const createResource = (config: ResolvedTraceConfig): Resource => {
const workerResourceAttrs = {
[SemanticResourceAttributes.CLOUD_PROVIDER]: 'cloudflare',
[SemanticResourceAttributes.CLOUD_PLATFORM]: 'cloudflare.workers',
[SemanticResourceAttributes.CLOUD_REGION]: 'earth',
// [SemanticResourceAttributes.FAAS_NAME]: '//TODO',
// [SemanticResourceAttributes.FAAS_VERSION]: '//TODO',
[SemanticResourceAttributes.FAAS_MAX_MEMORY]: 128,
[SemanticResourceAttributes.TELEMETRY_SDK_LANGUAGE]: 'JavaScript',
[SemanticResourceAttributes.TELEMETRY_SDK_NAME]: '@microlabs/otel-workers-sdk',
}
const serviceResource = new Resource({
[SemanticResourceAttributes.SERVICE_NAME]: config.service.name,
[SemanticResourceAttributes.SERVICE_NAMESPACE]: config.service.namespace,
[SemanticResourceAttributes.SERVICE_VERSION]: config.service.version,
})
const resource = new Resource(workerResourceAttrs)
return resource.merge(serviceResource)
}
function isSpanExporter(exporterConfig: ExporterConfig): exporterConfig is SpanExporter {
return !!(exporterConfig as SpanExporter).export
}
let initialised = false
function init(config: ResolvedTraceConfig): void {
if (!initialised) {
instrumentGlobalCache()
instrumentGlobalFetch()
propagation.setGlobalPropagator(new W3CTraceContextPropagator())
const resource = createResource(config)
const spanProcessor = new BatchTraceSpanProcessor()
const provider = new WorkerTracerProvider(spanProcessor, resource)
provider.register()
initialised = true
}
}
function isSampler(sampler: Sampler | ParentRatioSamplingConfig): sampler is Sampler {
return !!(sampler as Sampler).shouldSample
}
function createSampler(conf: ParentRatioSamplingConfig): Sampler {
const ratioSampler = new TraceIdRatioBasedSampler(conf.ratio)
if (typeof conf.acceptRemote === 'boolean' && !conf.acceptRemote) {
return new ParentBasedSampler({
root: ratioSampler,
remoteParentSampled: ratioSampler,
remoteParentNotSampled: ratioSampler,
})
} else {
return new ParentBasedSampler({ root: ratioSampler })
}
}
function parseConfig(supplied: TraceConfig): ResolvedTraceConfig {
const headSampleConf = supplied.sampling?.headSampler
const headSampler = headSampleConf
? isSampler(headSampleConf)
? headSampleConf
: createSampler(headSampleConf)
: new AlwaysOnSampler()
return {
exporter: isSpanExporter(supplied.exporter) ? supplied.exporter : new OTLPExporter(supplied.exporter),
fetch: {
includeTraceContext: supplied.fetch?.includeTraceContext ?? true,
},
handlers: {
fetch: {
acceptTraceContext: supplied.handlers?.fetch?.acceptTraceContext ?? true,
},
},
postProcessor: supplied.postProcessor || ((spans: ReadableSpan[]) => spans),
sampling: {
headSampler,
tailSampler: supplied.sampling?. | tailSampler || multiTailSampler([isHeadSampled, isRootErrorSpan]),
},
service: supplied.service,
} |
}
function createInitialiser(config: ConfigurationOption): Initialiser {
if (typeof config === 'function') {
return (env, trigger) => {
const conf = parseConfig(config(env, trigger))
init(conf)
return conf
}
} else {
return () => {
const conf = parseConfig(config)
init(conf)
return conf
}
}
}
export function instrument<E, Q, C>(
handler: ExportedHandler<E, Q, C>,
config: ConfigurationOption
): ExportedHandler<E, Q, C> {
const initialiser = createInitialiser(config)
if (handler.fetch) {
const fetcher = unwrap(handler.fetch) as FetchHandler
handler.fetch = createFetchHandler(fetcher, initialiser)
}
if (handler.queue) {
const queuer = unwrap(handler.queue) as QueueHandler
handler.queue = createQueueHandler(queuer, initialiser)
}
return handler
}
export function instrumentDO(doClass: DOClass, config: ConfigurationOption) {
const initialiser = createInitialiser(config)
return instrumentDOClass(doClass, initialiser)
}
export { waitUntilTrace } from './instrumentation/fetch.js'
| src/sdk.ts | evanderkoogh-otel-cf-workers-18fd741 | [
{
"filename": "src/types.ts",
"retrieved_chunk": "\theadSampler?: HS\n\ttailSampler?: TailSampleFn\n}\nexport interface TraceConfig<EC extends ExporterConfig = ExporterConfig> {\n\texporter: EC\n\thandlers?: HandlerConfig\n\tfetch?: FetcherConfig\n\tpostProcessor?: PostProcessorFn\n\tsampling?: SamplingConfig\n\tservice: ServiceConfig",
"score": 40.71626468966795
},
{
"filename": "src/spanprocessor.ts",
"retrieved_chunk": "}\nfunction startExport(currentState: TraceCompleteState, { args }: StartExportAction): ExportingState | DoneState {\n\tconst { exporter, tailSampler, postProcessor } = args\n\tconst { traceId, localRootSpan, completedSpans: spans } = currentState\n\tconst shouldExport = tailSampler({ traceId, localRootSpan, spans })\n\tif (shouldExport) {\n\t\tconst exportSpans = postProcessor(spans)\n\t\tconst promise = new Promise<ExportResult>((resolve) => {\n\t\t\texporter.export(exportSpans, resolve)\n\t\t})",
"score": 30.461828898463366
},
{
"filename": "src/spanprocessor.ts",
"retrieved_chunk": "\t\t\tthis.traces.set(traceId, newState)\n\t\t}\n\t\treturn newState\n\t}\n\tprivate export(traceId: string) {\n\t\tconst { exporter, sampling, postProcessor } = getActiveConfig()\n\t\tconst exportArgs = { exporter, tailSampler: sampling.tailSampler, postProcessor }\n\t\tconst newState = this.action(traceId, { actionName: 'startExport', args: exportArgs })\n\t\tif (newState.stateName === 'exporting') {\n\t\t\tconst promise = newState.promise",
"score": 28.133884215138043
},
{
"filename": "src/spanprocessor.ts",
"retrieved_chunk": "type StartExportArguments = {\n\texporter: SpanExporter\n\ttailSampler: TailSampleFn\n\tpostProcessor: PostProcessorFn\n}\ntype StartSpanAction = Action<'startSpan', { span: Span }>\ntype EndSpanAction = Action<'endSpan', { span: ReadableSpan }>\ntype StartExportAction = Action<'startExport', { args: StartExportArguments }>\nfunction newTrace(currentState: InitialState, { span }: StartSpanAction): InProgressTraceState {\n\tconst spanId = span.spanContext().spanId",
"score": 16.242550937116572
},
{
"filename": "src/types.ts",
"retrieved_chunk": "import { ReadableSpan, Sampler, SpanExporter } from '@opentelemetry/sdk-trace-base'\nimport { OTLPExporterConfig } from './exporter.js'\nimport { FetchHandlerConfig, FetcherConfig } from './instrumentation/fetch.js'\nimport { TailSampleFn } from './sampling.js'\nexport type PostProcessorFn = (spans: ReadableSpan[]) => ReadableSpan[]\nexport type ExporterConfig = OTLPExporterConfig | SpanExporter\nexport interface HandlerConfig {\n\tfetch?: FetchHandlerConfig\n}\nexport interface ServiceConfig {",
"score": 15.773578627838576
}
] | typescript | tailSampler || multiTailSampler([isHeadSampled, isRootErrorSpan]),
},
service: supplied.service,
} |
import { propagation } from '@opentelemetry/api'
import { W3CTraceContextPropagator } from '@opentelemetry/core'
import { Resource } from '@opentelemetry/resources'
import { SemanticResourceAttributes } from '@opentelemetry/semantic-conventions'
import {
AlwaysOnSampler,
ParentBasedSampler,
ReadableSpan,
Sampler,
SpanExporter,
TraceIdRatioBasedSampler,
} from '@opentelemetry/sdk-trace-base'
import { Initialiser } from './config.js'
import { OTLPExporter } from './exporter.js'
import { WorkerTracerProvider } from './provider.js'
import { isHeadSampled, isRootErrorSpan, multiTailSampler } from './sampling.js'
import { BatchTraceSpanProcessor } from './spanprocessor.js'
import { Trigger, TraceConfig, ResolvedTraceConfig, ExporterConfig, ParentRatioSamplingConfig } from './types.js'
import { unwrap } from './wrap.js'
import { createFetchHandler, instrumentGlobalFetch } from './instrumentation/fetch.js'
import { instrumentGlobalCache } from './instrumentation/cache.js'
import { createQueueHandler } from './instrumentation/queue.js'
import { DOClass, instrumentDOClass } from './instrumentation/do.js'
type FetchHandler = ExportedHandlerFetchHandler<unknown, unknown>
type QueueHandler = ExportedHandlerQueueHandler
export type ResolveConfigFn = (env: any, trigger: Trigger) => TraceConfig
export type ConfigurationOption = TraceConfig | ResolveConfigFn
export function isRequest(trigger: Trigger): trigger is Request {
return trigger instanceof Request
}
export function isMessageBatch(trigger: Trigger): trigger is MessageBatch {
return !!(trigger as MessageBatch).ackAll
}
export function isAlarm(trigger: Trigger): trigger is 'do-alarm' {
return trigger === 'do-alarm'
}
const createResource = (config: ResolvedTraceConfig): Resource => {
const workerResourceAttrs = {
[SemanticResourceAttributes.CLOUD_PROVIDER]: 'cloudflare',
[SemanticResourceAttributes.CLOUD_PLATFORM]: 'cloudflare.workers',
[SemanticResourceAttributes.CLOUD_REGION]: 'earth',
// [SemanticResourceAttributes.FAAS_NAME]: '//TODO',
// [SemanticResourceAttributes.FAAS_VERSION]: '//TODO',
[SemanticResourceAttributes.FAAS_MAX_MEMORY]: 128,
[SemanticResourceAttributes.TELEMETRY_SDK_LANGUAGE]: 'JavaScript',
[SemanticResourceAttributes.TELEMETRY_SDK_NAME]: '@microlabs/otel-workers-sdk',
}
const serviceResource = new Resource({
[SemanticResourceAttributes.SERVICE_NAME]: config.service.name,
[SemanticResourceAttributes.SERVICE_NAMESPACE]: config.service.namespace,
[SemanticResourceAttributes.SERVICE_VERSION]: config.service.version,
})
const resource = new Resource(workerResourceAttrs)
return resource.merge(serviceResource)
}
function isSpanExporter(exporterConfig: ExporterConfig): exporterConfig is SpanExporter {
return !!(exporterConfig as SpanExporter).export
}
let initialised = false
function init(config: ResolvedTraceConfig): void {
if (!initialised) {
instrumentGlobalCache()
instrumentGlobalFetch()
propagation.setGlobalPropagator(new W3CTraceContextPropagator())
const resource = createResource(config)
const | spanProcessor = new BatchTraceSpanProcessor()
const provider = new WorkerTracerProvider(spanProcessor, resource)
provider.register()
initialised = true
} |
}
function isSampler(sampler: Sampler | ParentRatioSamplingConfig): sampler is Sampler {
return !!(sampler as Sampler).shouldSample
}
function createSampler(conf: ParentRatioSamplingConfig): Sampler {
const ratioSampler = new TraceIdRatioBasedSampler(conf.ratio)
if (typeof conf.acceptRemote === 'boolean' && !conf.acceptRemote) {
return new ParentBasedSampler({
root: ratioSampler,
remoteParentSampled: ratioSampler,
remoteParentNotSampled: ratioSampler,
})
} else {
return new ParentBasedSampler({ root: ratioSampler })
}
}
function parseConfig(supplied: TraceConfig): ResolvedTraceConfig {
const headSampleConf = supplied.sampling?.headSampler
const headSampler = headSampleConf
? isSampler(headSampleConf)
? headSampleConf
: createSampler(headSampleConf)
: new AlwaysOnSampler()
return {
exporter: isSpanExporter(supplied.exporter) ? supplied.exporter : new OTLPExporter(supplied.exporter),
fetch: {
includeTraceContext: supplied.fetch?.includeTraceContext ?? true,
},
handlers: {
fetch: {
acceptTraceContext: supplied.handlers?.fetch?.acceptTraceContext ?? true,
},
},
postProcessor: supplied.postProcessor || ((spans: ReadableSpan[]) => spans),
sampling: {
headSampler,
tailSampler: supplied.sampling?.tailSampler || multiTailSampler([isHeadSampled, isRootErrorSpan]),
},
service: supplied.service,
}
}
function createInitialiser(config: ConfigurationOption): Initialiser {
if (typeof config === 'function') {
return (env, trigger) => {
const conf = parseConfig(config(env, trigger))
init(conf)
return conf
}
} else {
return () => {
const conf = parseConfig(config)
init(conf)
return conf
}
}
}
export function instrument<E, Q, C>(
handler: ExportedHandler<E, Q, C>,
config: ConfigurationOption
): ExportedHandler<E, Q, C> {
const initialiser = createInitialiser(config)
if (handler.fetch) {
const fetcher = unwrap(handler.fetch) as FetchHandler
handler.fetch = createFetchHandler(fetcher, initialiser)
}
if (handler.queue) {
const queuer = unwrap(handler.queue) as QueueHandler
handler.queue = createQueueHandler(queuer, initialiser)
}
return handler
}
export function instrumentDO(doClass: DOClass, config: ConfigurationOption) {
const initialiser = createInitialiser(config)
return instrumentDOClass(doClass, initialiser)
}
export { waitUntilTrace } from './instrumentation/fetch.js'
| src/sdk.ts | evanderkoogh-otel-cf-workers-18fd741 | [
{
"filename": "src/provider.ts",
"retrieved_chunk": " * @param config Configuration object for SDK registration\n */\nexport class WorkerTracerProvider implements TracerProvider {\n\tprivate spanProcessor: SpanProcessor\n\tprivate resource: Resource\n\tprivate tracers: Record<string, Tracer> = {}\n\tconstructor(spanProcessor: SpanProcessor, resource: Resource) {\n\t\tthis.spanProcessor = spanProcessor\n\t\tthis.resource = resource\n\t}",
"score": 29.835767376657067
},
{
"filename": "src/provider.ts",
"retrieved_chunk": "\tgetTracer(name: string, version?: string, options?: TracerOptions): Tracer {\n\t\tconst key = `${name}@${version || ''}:${options?.schemaUrl || ''}`\n\t\tif (!this.tracers[key]) {\n\t\t\tthis.tracers[key] = new WorkerTracer(this.spanProcessor, this.resource)\n\t\t}\n\t\treturn this.tracers[key]\n\t}\n\tregister(): void {\n\t\ttrace.setGlobalTracerProvider(this)\n\t\tcontext.setGlobalContextManager(new AsyncLocalStorageContextManager())",
"score": 26.787882418427046
},
{
"filename": "src/tracer.ts",
"retrieved_chunk": "\t\tthis._spanProcessor = spanProcessor\n\t\tthis.resource = resource\n\t}\n\tget spanProcessor() {\n\t\treturn this._spanProcessor\n\t}\n\taddToResource(extra: Resource) {\n\t\tthis.resource.merge(extra)\n\t}\n\tstartSpan(name: string, options: SpanOptions = {}, context = api_context.active()): Span {",
"score": 20.454980089955633
},
{
"filename": "src/tracer.ts",
"retrieved_chunk": "import { sanitizeAttributes } from '@opentelemetry/core'\nimport { Resource } from '@opentelemetry/resources'\nimport { SpanProcessor, RandomIdGenerator, ReadableSpan, SamplingDecision } from '@opentelemetry/sdk-trace-base'\nimport { SpanImpl } from './span.js'\nimport { getActiveConfig } from './config.js'\nexport class WorkerTracer implements Tracer {\n\tprivate readonly _spanProcessor: SpanProcessor\n\tprivate readonly resource: Resource\n\tprivate readonly idGenerator: RandomIdGenerator = new RandomIdGenerator()\n\tconstructor(spanProcessor: SpanProcessor, resource: Resource) {",
"score": 18.86513168470985
},
{
"filename": "src/instrumentation/fetch.ts",
"retrieved_chunk": "\t\t\tconst options: SpanOptions = { kind: SpanKind.CLIENT, attributes: attrs }\n\t\t\tconst host = new URL(request.url).host\n\t\t\tconst spanName = typeof attrs?.['name'] === 'string' ? attrs?.['name'] : `fetch: ${host}`\n\t\t\tconst promise = tracer.startActiveSpan(spanName, options, async (span) => {\n\t\t\t\tconst includeTraceContext =\n\t\t\t\t\ttypeof config.includeTraceContext === 'function'\n\t\t\t\t\t\t? config.includeTraceContext(request)\n\t\t\t\t\t\t: config.includeTraceContext\n\t\t\t\tif (includeTraceContext ?? true) {\n\t\t\t\t\tpropagation.inject(api_context.active(), request.headers, {",
"score": 18.491470531296763
}
] | typescript | spanProcessor = new BatchTraceSpanProcessor()
const provider = new WorkerTracerProvider(spanProcessor, resource)
provider.register()
initialised = true
} |
import { propagation } from '@opentelemetry/api'
import { W3CTraceContextPropagator } from '@opentelemetry/core'
import { Resource } from '@opentelemetry/resources'
import { SemanticResourceAttributes } from '@opentelemetry/semantic-conventions'
import {
AlwaysOnSampler,
ParentBasedSampler,
ReadableSpan,
Sampler,
SpanExporter,
TraceIdRatioBasedSampler,
} from '@opentelemetry/sdk-trace-base'
import { Initialiser } from './config.js'
import { OTLPExporter } from './exporter.js'
import { WorkerTracerProvider } from './provider.js'
import { isHeadSampled, isRootErrorSpan, multiTailSampler } from './sampling.js'
import { BatchTraceSpanProcessor } from './spanprocessor.js'
import { Trigger, TraceConfig, ResolvedTraceConfig, ExporterConfig, ParentRatioSamplingConfig } from './types.js'
import { unwrap } from './wrap.js'
import { createFetchHandler, instrumentGlobalFetch } from './instrumentation/fetch.js'
import { instrumentGlobalCache } from './instrumentation/cache.js'
import { createQueueHandler } from './instrumentation/queue.js'
import { DOClass, instrumentDOClass } from './instrumentation/do.js'
type FetchHandler = ExportedHandlerFetchHandler<unknown, unknown>
type QueueHandler = ExportedHandlerQueueHandler
export type ResolveConfigFn = (env: any, trigger: Trigger) => TraceConfig
export type ConfigurationOption = TraceConfig | ResolveConfigFn
export function isRequest(trigger: Trigger): trigger is Request {
return trigger instanceof Request
}
export function isMessageBatch(trigger: Trigger): trigger is MessageBatch {
return !!(trigger as MessageBatch).ackAll
}
export function isAlarm(trigger: Trigger): trigger is 'do-alarm' {
return trigger === 'do-alarm'
}
const createResource = (config: ResolvedTraceConfig): Resource => {
const workerResourceAttrs = {
[SemanticResourceAttributes.CLOUD_PROVIDER]: 'cloudflare',
[SemanticResourceAttributes.CLOUD_PLATFORM]: 'cloudflare.workers',
[SemanticResourceAttributes.CLOUD_REGION]: 'earth',
// [SemanticResourceAttributes.FAAS_NAME]: '//TODO',
// [SemanticResourceAttributes.FAAS_VERSION]: '//TODO',
[SemanticResourceAttributes.FAAS_MAX_MEMORY]: 128,
[SemanticResourceAttributes.TELEMETRY_SDK_LANGUAGE]: 'JavaScript',
[SemanticResourceAttributes.TELEMETRY_SDK_NAME]: '@microlabs/otel-workers-sdk',
}
const serviceResource = new Resource({
[SemanticResourceAttributes.SERVICE_NAME]: config.service.name,
[SemanticResourceAttributes.SERVICE_NAMESPACE]: config.service.namespace,
[SemanticResourceAttributes.SERVICE_VERSION]: config.service.version,
})
const resource = new Resource(workerResourceAttrs)
return resource.merge(serviceResource)
}
function isSpanExporter(exporterConfig: ExporterConfig): exporterConfig is SpanExporter {
return !!(exporterConfig as SpanExporter).export
}
let initialised = false
function init(config: ResolvedTraceConfig): void {
if (!initialised) {
instrumentGlobalCache()
instrumentGlobalFetch()
propagation.setGlobalPropagator(new W3CTraceContextPropagator())
const resource = createResource(config)
const spanProcessor = new BatchTraceSpanProcessor()
const provider = new WorkerTracerProvider(spanProcessor, resource)
provider.register()
initialised = true
}
}
function isSampler(sampler: Sampler | ParentRatioSamplingConfig): sampler is Sampler {
return !!(sampler as Sampler).shouldSample
}
function createSampler(conf: ParentRatioSamplingConfig): Sampler {
const ratioSampler = new TraceIdRatioBasedSampler(conf.ratio)
if (typeof conf.acceptRemote === 'boolean' && !conf.acceptRemote) {
return new ParentBasedSampler({
root: ratioSampler,
remoteParentSampled: ratioSampler,
remoteParentNotSampled: ratioSampler,
})
} else {
return new ParentBasedSampler({ root: ratioSampler })
}
}
function parseConfig(supplied: TraceConfig): ResolvedTraceConfig {
const headSampleConf = supplied.sampling?.headSampler
const headSampler = headSampleConf
? isSampler(headSampleConf)
? headSampleConf
: createSampler(headSampleConf)
: new AlwaysOnSampler()
return {
exporter: isSpanExporter(supplied.exporter) ? supplied.exporter : new OTLPExporter(supplied.exporter),
fetch: {
includeTraceContext: supplied.fetch?.includeTraceContext ?? true,
},
handlers: {
fetch: {
acceptTraceContext: supplied.handlers?.fetch?.acceptTraceContext ?? true,
},
},
postProcessor: supplied.postProcessor || ((spans: ReadableSpan[]) => spans),
sampling: {
headSampler,
tailSampler: supplied.sampling?.tailSampler || multiTailSampler([isHeadSampled, isRootErrorSpan]),
},
service: supplied.service,
}
}
function createInitialiser(config: ConfigurationOption): Initialiser {
if (typeof config === 'function') {
return (env, trigger) => {
const conf = parseConfig(config(env, trigger))
init(conf)
return conf
}
} else {
return () => {
const conf = parseConfig(config)
init(conf)
return conf
}
}
}
export function instrument<E, Q, C>(
handler: ExportedHandler<E, Q, C>,
config: ConfigurationOption
): ExportedHandler<E, Q, C> {
const initialiser = createInitialiser(config)
if (handler.fetch) {
const fetcher = unwrap(handler.fetch) as FetchHandler
handler.fetch = createFetchHandler(fetcher, initialiser)
}
if (handler.queue) {
const queuer = unwrap(handler.queue) as QueueHandler
handler.queue = createQueueHandler(queuer, initialiser)
}
return handler
}
export function instrumentDO(doClass: DOClass, config: ConfigurationOption) {
const initialiser = createInitialiser(config)
return instrumentDOClass(doClass, initialiser)
}
export { waitUntilTrace } from './instrumentation/fetch.js'
| src/sdk.ts | evanderkoogh-otel-cf-workers-18fd741 | [
{
"filename": "src/provider.ts",
"retrieved_chunk": " * @param config Configuration object for SDK registration\n */\nexport class WorkerTracerProvider implements TracerProvider {\n\tprivate spanProcessor: SpanProcessor\n\tprivate resource: Resource\n\tprivate tracers: Record<string, Tracer> = {}\n\tconstructor(spanProcessor: SpanProcessor, resource: Resource) {\n\t\tthis.spanProcessor = spanProcessor\n\t\tthis.resource = resource\n\t}",
"score": 29.835767376657067
},
{
"filename": "src/provider.ts",
"retrieved_chunk": "\tgetTracer(name: string, version?: string, options?: TracerOptions): Tracer {\n\t\tconst key = `${name}@${version || ''}:${options?.schemaUrl || ''}`\n\t\tif (!this.tracers[key]) {\n\t\t\tthis.tracers[key] = new WorkerTracer(this.spanProcessor, this.resource)\n\t\t}\n\t\treturn this.tracers[key]\n\t}\n\tregister(): void {\n\t\ttrace.setGlobalTracerProvider(this)\n\t\tcontext.setGlobalContextManager(new AsyncLocalStorageContextManager())",
"score": 26.787882418427046
},
{
"filename": "src/tracer.ts",
"retrieved_chunk": "\t\tthis._spanProcessor = spanProcessor\n\t\tthis.resource = resource\n\t}\n\tget spanProcessor() {\n\t\treturn this._spanProcessor\n\t}\n\taddToResource(extra: Resource) {\n\t\tthis.resource.merge(extra)\n\t}\n\tstartSpan(name: string, options: SpanOptions = {}, context = api_context.active()): Span {",
"score": 20.454980089955633
},
{
"filename": "src/instrumentation/fetch.ts",
"retrieved_chunk": "\t\t\tconst options: SpanOptions = { kind: SpanKind.CLIENT, attributes: attrs }\n\t\t\tconst host = new URL(request.url).host\n\t\t\tconst spanName = typeof attrs?.['name'] === 'string' ? attrs?.['name'] : `fetch: ${host}`\n\t\t\tconst promise = tracer.startActiveSpan(spanName, options, async (span) => {\n\t\t\t\tconst includeTraceContext =\n\t\t\t\t\ttypeof config.includeTraceContext === 'function'\n\t\t\t\t\t\t? config.includeTraceContext(request)\n\t\t\t\t\t\t: config.includeTraceContext\n\t\t\t\tif (includeTraceContext ?? true) {\n\t\t\t\t\tpropagation.inject(api_context.active(), request.headers, {",
"score": 19.044711304257966
},
{
"filename": "src/tracer.ts",
"retrieved_chunk": "import { sanitizeAttributes } from '@opentelemetry/core'\nimport { Resource } from '@opentelemetry/resources'\nimport { SpanProcessor, RandomIdGenerator, ReadableSpan, SamplingDecision } from '@opentelemetry/sdk-trace-base'\nimport { SpanImpl } from './span.js'\nimport { getActiveConfig } from './config.js'\nexport class WorkerTracer implements Tracer {\n\tprivate readonly _spanProcessor: SpanProcessor\n\tprivate readonly resource: Resource\n\tprivate readonly idGenerator: RandomIdGenerator = new RandomIdGenerator()\n\tconstructor(spanProcessor: SpanProcessor, resource: Resource) {",
"score": 18.86513168470985
}
] | typescript | Sampler | ParentRatioSamplingConfig): sampler is Sampler { |
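The sdk.ts row above resolves head samplers through isSampler/createSampler. A minimal usage sketch, assuming the example sits next to these sources (relative './sdk.js' and './types.js' imports) and with a hypothetical endpoint, API key, ratio and service name, of feeding a ParentRatioSamplingConfig through instrument():

import { instrument } from './sdk.js'
import type { TraceConfig } from './types.js'

// The headSampler literal matches ParentRatioSamplingConfig, so parseConfig()
// wraps it in a ParentBasedSampler around a TraceIdRatioBasedSampler.
const config: TraceConfig = {
	exporter: { url: 'https://otlp.example.com/v1/traces', headers: { 'x-api-key': 'REPLACE_ME' } },
	service: { name: 'sampling-demo' },
	sampling: {
		headSampler: { ratio: 0.25, acceptRemote: false },
	},
}

const handler = {
	async fetch(): Promise<Response> {
		return new Response('hello')
	},
}

export default instrument(handler, config)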
import { context as api_context, trace, SpanOptions, SpanKind, Exception, SpanStatusCode } from '@opentelemetry/api'
import { SemanticAttributes } from '@opentelemetry/semantic-conventions'
import { passthroughGet, unwrap, wrap } from '../wrap.js'
import {
getParentContextFromHeaders,
gatherIncomingCfAttributes,
gatherRequestAttributes,
gatherResponseAttributes,
instrumentFetcher,
} from './fetch.js'
import { instrumentEnv } from './env.js'
import { Initialiser, setConfig } from '../config.js'
import { exportSpans } from './common.js'
import { instrumentStorage } from './do-storage.js'
import { DOConstructorTrigger } from '../types.js'
type FetchFn = DurableObject['fetch']
type AlarmFn = DurableObject['alarm']
type Env = Record<string, unknown>
function instrumentBindingStub(stub: DurableObjectStub, nsName: string): DurableObjectStub {
const stubHandler: ProxyHandler<typeof stub> = {
get(target, prop) {
if (prop === 'fetch') {
const fetcher = Reflect.get(target, prop)
const attrs = {
name: `durable_object:${nsName}`,
'do.namespace': nsName,
'do.id': target.id.toString(),
'do.id.name': target.id.name,
}
return instrumentFetcher(fetcher, () => ({ includeTraceContext: true }), attrs)
} else {
return passthroughGet(target, prop)
}
},
}
return wrap(stub, stubHandler)
}
function instrumentBindingGet(getFn: DurableObjectNamespace['get'], nsName: string): DurableObjectNamespace['get'] {
const getHandler: ProxyHandler<DurableObjectNamespace['get']> = {
apply(target, thisArg, argArray) {
const stub: DurableObjectStub = Reflect.apply(target, thisArg, argArray)
return instrumentBindingStub(stub, nsName)
},
}
return wrap(getFn, getHandler)
}
export function instrumentDOBinding(ns: DurableObjectNamespace, nsName: string) {
const nsHandler: ProxyHandler<typeof ns> = {
get(target, prop) {
if (prop === 'get') {
const fn = Reflect.get(ns, prop)
return instrumentBindingGet(fn, nsName)
} else {
return passthroughGet(target, prop)
}
},
}
return wrap(ns, nsHandler)
}
export function instrumentState(state: DurableObjectState) {
const stateHandler: ProxyHandler<DurableObjectState> = {
get(target, prop, receiver) {
const result = Reflect.get(target, prop, unwrap(receiver))
if (prop === 'storage') {
return instrumentStorage(result)
} else if (typeof result === 'function') {
return result.bind(target)
} else {
return result
}
},
}
return wrap(state, stateHandler)
}
let cold_start = true
export type DOClass = { new (state: DurableObjectState, env: any): DurableObject }
export function executeDOFetch(fetchFn: FetchFn, request: Request, id: DurableObjectId): Promise<Response> {
const spanContext = getParentContextFromHeaders(request.headers)
const tracer = trace.getTracer('DO fetchHandler')
const attributes = {
[SemanticAttributes.FAAS_TRIGGER]: 'http',
[SemanticAttributes.FAAS_COLDSTART]: cold_start,
}
cold_start = false
Object.assign(attributes, gatherRequestAttributes(request))
Object.assign(attributes, gatherIncomingCfAttributes(request))
const options: SpanOptions = {
attributes,
kind: SpanKind.SERVER,
}
const name = id.name || ''
const promise = tracer.startActiveSpan(`do.fetchHandler:${name}`, options, spanContext, async (span) => {
try {
const response: Response = await fetchFn(request)
if (response.ok) {
span.setStatus({ code: SpanStatusCode.OK })
}
span.setAttributes(gatherResponseAttributes(response))
span.end()
return response
} catch (error) {
span.recordException(error as Exception)
span.setStatus({ code: SpanStatusCode.ERROR })
span.end()
throw error
}
})
return promise
}
export function executeDOAlarm(alarmFn: NonNullable<AlarmFn>, id: DurableObjectId): Promise<void> {
const tracer = trace.getTracer('DO alarmHandler')
const name = id.name || ''
const promise = tracer.startActiveSpan(`do.alarmHandler:${name}`, async (span) => {
span.setAttribute(SemanticAttributes.FAAS_COLDSTART, cold_start)
cold_start = false
span.setAttribute('do.id', id.toString())
if (id.name) span.setAttribute('do.name', id.name)
try {
await alarmFn()
span.end()
} catch (error) {
span.recordException(error as Exception)
span.setStatus({ code: SpanStatusCode.ERROR })
span.end()
throw error
}
})
return promise
}
function instrumentFetchFn(fetchFn: FetchFn, initialiser: Initialiser, env: Env, id: DurableObjectId): FetchFn {
const fetchHandler: ProxyHandler<FetchFn> = {
async apply(target, thisArg, argArray: Parameters<FetchFn>) {
const request = argArray[0]
			const config = initialiser(env, request)
const context = setConfig(config)
try { |
const bound = target.bind(unwrap(thisArg))
return await api_context.with(context, executeDOFetch, undefined, bound, request, id)
} catch (error) {
throw error
} finally {
exportSpans()
}
},
}
return wrap(fetchFn, fetchHandler)
}
function instrumentAlarmFn(alarmFn: AlarmFn, initialiser: Initialiser, env: Env, id: DurableObjectId) {
if (!alarmFn) return undefined
const alarmHandler: ProxyHandler<NonNullable<AlarmFn>> = {
async apply(target, thisArg) {
const config = initialiser(env, 'do-alarm')
const context = setConfig(config)
try {
const bound = target.bind(unwrap(thisArg))
return await api_context.with(context, executeDOAlarm, undefined, bound, id)
} catch (error) {
throw error
} finally {
exportSpans()
}
},
}
return wrap(alarmFn, alarmHandler)
}
function instrumentDurableObject(doObj: DurableObject, initialiser: Initialiser, env: Env, state: DurableObjectState) {
const objHandler: ProxyHandler<DurableObject> = {
get(target, prop) {
if (prop === 'fetch') {
const fetchFn = Reflect.get(target, prop)
return instrumentFetchFn(fetchFn, initialiser, env, state.id)
} else if (prop === 'alarm') {
const alarmFn = Reflect.get(target, prop)
return instrumentAlarmFn(alarmFn, initialiser, env, state.id)
} else {
const result = Reflect.get(target, prop)
if (typeof result === 'function') {
					// return the bound method so `this` keeps pointing at the original Durable Object
					return result.bind(doObj)
}
return result
}
},
}
return wrap(doObj, objHandler)
}
export function instrumentDOClass(doClass: DOClass, initialiser: Initialiser): DOClass {
const classHandler: ProxyHandler<DOClass> = {
construct(target, [orig_state, orig_env]: ConstructorParameters<DOClass>) {
const trigger: DOConstructorTrigger = {
id: orig_state.id.toString(),
name: orig_state.id.name,
}
const constructorConfig = initialiser(orig_env, trigger)
const context = setConfig(constructorConfig)
const state = instrumentState(orig_state)
const env = instrumentEnv(orig_env)
const createDO = () => {
return new target(state, env)
}
const doObj = api_context.with(context, createDO)
return instrumentDurableObject(doObj, initialiser, env, state)
},
}
return wrap(doClass, classHandler)
}
| src/instrumentation/do.ts | evanderkoogh-otel-cf-workers-18fd741 | [
{
"filename": "src/instrumentation/fetch.ts",
"retrieved_chunk": "\t\tapply: async (target, _thisArg, argArray: Parameters<FetchHandler>): Promise<Response> => {\n\t\t\tconst [request, orig_env, orig_ctx] = argArray\n\t\t\tconst config = initialiser(orig_env as Record<string, unknown>, request)\n\t\t\tconst env = instrumentEnv(orig_env as Record<string, unknown>)\n\t\t\tconst { ctx, tracker } = proxyExecutionContext(orig_ctx)\n\t\t\tconst context = setConfig(config)\n\t\t\ttry {\n\t\t\t\tconst args: FetchHandlerArgs = [request, env, ctx]\n\t\t\t\treturn await api_context.with(context, executeFetchHandler, undefined, target, args)\n\t\t\t} catch (error) {",
"score": 48.69795057246753
},
{
"filename": "src/instrumentation/queue.ts",
"retrieved_chunk": "\t\t\tconst [batch, orig_env, orig_ctx] = argArray\n\t\t\tconst config = initialiser(orig_env as Record<string, unknown>, batch)\n\t\t\tconst env = instrumentEnv(orig_env as Record<string, unknown>)\n\t\t\tconst { ctx, tracker } = proxyExecutionContext(orig_ctx)\n\t\t\tconst context = setConfig(config)\n\t\t\ttry {\n\t\t\t\tconst args: QueueHandlerArgs = [batch, env, ctx]\n\t\t\t\treturn await api_context.with(context, executeQueueHandler, undefined, target, args)\n\t\t\t} catch (error) {\n\t\t\t\tthrow error",
"score": 36.399776329547095
},
{
"filename": "src/instrumentation/queue.ts",
"retrieved_chunk": "\t\t\tcount.retryRemaining()\n\t\t\tspan.end()\n\t\t\tthrow error\n\t\t}\n\t})\n\treturn promise\n}\nexport function createQueueHandler(queueFn: QueueHandler, initialiser: Initialiser) {\n\tconst queueHandler: ProxyHandler<QueueHandler> = {\n\t\tasync apply(target, _thisArg, argArray: Parameters<QueueHandler>): Promise<void> {",
"score": 35.2783781660228
},
{
"filename": "src/instrumentation/fetch.ts",
"retrieved_chunk": "\tfetchFn: Fetcher['fetch'],\n\tconfigFn: getFetchConfig,\n\tattrs?: Attributes\n): Fetcher['fetch'] {\n\tconst handler: ProxyHandler<typeof fetch> = {\n\t\tapply: (target, thisArg, argArray): ReturnType<typeof fetch> => {\n\t\t\tconst workerConfig = getActiveConfig()\n\t\t\tconst config = configFn(workerConfig)\n\t\t\tconst request = new Request(argArray[0], argArray[1])\n\t\t\tconst tracer = trace.getTracer('fetcher')",
"score": 32.79997706388592
},
{
"filename": "src/instrumentation/fetch.ts",
"retrieved_chunk": "\tconst promise = tracer.startActiveSpan('fetchHandler', options, spanContext, async (span) => {\n\t\ttry {\n\t\t\tconst response: Response = await fetchFn(request, env, ctx)\n\t\t\tif (response.status < 500) {\n\t\t\t\tspan.setStatus({ code: SpanStatusCode.OK })\n\t\t\t}\n\t\t\tspan.setAttributes(gatherResponseAttributes(response))\n\t\t\tspan.end()\n\t\t\treturn response\n\t\t} catch (error) {",
"score": 30.56847141397522
}
] | typescript | initialiser(env, request)
const context = setConfig(config)
try { |
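The do.ts row above proxies Durable Object classes. A minimal sketch of exporting an instrumented class with instrumentDO(); the class, endpoint and service names are hypothetical:

import { instrumentDO } from './sdk.js'

const config = {
	exporter: { url: 'https://otlp.example.com/v1/traces' }, // hypothetical endpoint
	service: { name: 'counter-do' },
}

class Counter implements DurableObject {
	private value = 0

	constructor(_state: DurableObjectState, _env: unknown) {}

	async fetch(_request: Request): Promise<Response> {
		this.value += 1
		return new Response(String(this.value))
	}
}

// Export the proxied class; its constructor, fetch and alarm handlers are traced.
export const TracedCounter = instrumentDO(Counter, config)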
import { propagation } from '@opentelemetry/api'
import { W3CTraceContextPropagator } from '@opentelemetry/core'
import { Resource } from '@opentelemetry/resources'
import { SemanticResourceAttributes } from '@opentelemetry/semantic-conventions'
import {
AlwaysOnSampler,
ParentBasedSampler,
ReadableSpan,
Sampler,
SpanExporter,
TraceIdRatioBasedSampler,
} from '@opentelemetry/sdk-trace-base'
import { Initialiser } from './config.js'
import { OTLPExporter } from './exporter.js'
import { WorkerTracerProvider } from './provider.js'
import { isHeadSampled, isRootErrorSpan, multiTailSampler } from './sampling.js'
import { BatchTraceSpanProcessor } from './spanprocessor.js'
import { Trigger, TraceConfig, ResolvedTraceConfig, ExporterConfig, ParentRatioSamplingConfig } from './types.js'
import { unwrap } from './wrap.js'
import { createFetchHandler, instrumentGlobalFetch } from './instrumentation/fetch.js'
import { instrumentGlobalCache } from './instrumentation/cache.js'
import { createQueueHandler } from './instrumentation/queue.js'
import { DOClass, instrumentDOClass } from './instrumentation/do.js'
type FetchHandler = ExportedHandlerFetchHandler<unknown, unknown>
type QueueHandler = ExportedHandlerQueueHandler
export type ResolveConfigFn = (env: any, trigger: Trigger) => TraceConfig
export type ConfigurationOption = TraceConfig | ResolveConfigFn
export function isRequest(trigger: Trigger): trigger is Request {
return trigger instanceof Request
}
export function isMessageBatch(trigger: Trigger): trigger is MessageBatch {
return !!(trigger as MessageBatch).ackAll
}
export function isAlarm(trigger: Trigger): trigger is 'do-alarm' {
return trigger === 'do-alarm'
}
const createResource = (config: ResolvedTraceConfig): Resource => {
const workerResourceAttrs = {
[SemanticResourceAttributes.CLOUD_PROVIDER]: 'cloudflare',
[SemanticResourceAttributes.CLOUD_PLATFORM]: 'cloudflare.workers',
[SemanticResourceAttributes.CLOUD_REGION]: 'earth',
// [SemanticResourceAttributes.FAAS_NAME]: '//TODO',
// [SemanticResourceAttributes.FAAS_VERSION]: '//TODO',
[SemanticResourceAttributes.FAAS_MAX_MEMORY]: 128,
[SemanticResourceAttributes.TELEMETRY_SDK_LANGUAGE]: 'JavaScript',
[SemanticResourceAttributes.TELEMETRY_SDK_NAME]: '@microlabs/otel-workers-sdk',
}
const serviceResource = new Resource({
[SemanticResourceAttributes.SERVICE_NAME]: config.service.name,
[SemanticResourceAttributes.SERVICE_NAMESPACE]: config.service.namespace,
[SemanticResourceAttributes.SERVICE_VERSION]: config.service.version,
})
const resource = new Resource(workerResourceAttrs)
return resource.merge(serviceResource)
}
function isSpanExporter(exporterConfig: ExporterConfig): exporterConfig is SpanExporter {
return !!(exporterConfig as SpanExporter).export
}
let initialised = false
function init(config: ResolvedTraceConfig): void {
if (!initialised) {
instrumentGlobalCache()
instrumentGlobalFetch()
propagation.setGlobalPropagator(new W3CTraceContextPropagator())
const resource = createResource(config)
const spanProcessor = new BatchTraceSpanProcessor()
const provider = new WorkerTracerProvider(spanProcessor, resource)
provider.register()
initialised = true
}
}
function isSampler(sampler: Sampler | ParentRatioSamplingConfig): sampler is Sampler {
return !!(sampler as Sampler).shouldSample
}
function createSampler(conf: ParentRatioSamplingConfig): Sampler {
const ratioSampler = new TraceIdRatioBasedSampler(conf.ratio)
if (typeof conf.acceptRemote === 'boolean' && !conf.acceptRemote) {
return new ParentBasedSampler({
root: ratioSampler,
remoteParentSampled: ratioSampler,
remoteParentNotSampled: ratioSampler,
})
} else {
return new ParentBasedSampler({ root: ratioSampler })
}
}
function parseConfig(supplied: TraceConfig): ResolvedTraceConfig {
const headSampleConf = supplied.sampling?.headSampler
const headSampler = headSampleConf
? isSampler(headSampleConf)
? headSampleConf
: createSampler(headSampleConf)
: new AlwaysOnSampler()
return {
exporter: isSpanExporter(supplied.exporter) ? supplied.exporter : new OTLPExporter(supplied.exporter),
fetch: {
includeTraceContext: supplied.fetch?.includeTraceContext ?? true,
},
handlers: {
fetch: {
acceptTraceContext: supplied.handlers?.fetch?.acceptTraceContext ?? true,
},
},
postProcessor: supplied.postProcessor || ((spans: ReadableSpan[]) => spans),
sampling: {
headSampler,
		tailSampler: supplied.sampling?.tailSampler || multiTailSampler([isHeadSampled, isRootErrorSpan]),
},
service: supplied.service,
} |
}
function createInitialiser(config: ConfigurationOption): Initialiser {
if (typeof config === 'function') {
return (env, trigger) => {
const conf = parseConfig(config(env, trigger))
init(conf)
return conf
}
} else {
return () => {
const conf = parseConfig(config)
init(conf)
return conf
}
}
}
export function instrument<E, Q, C>(
handler: ExportedHandler<E, Q, C>,
config: ConfigurationOption
): ExportedHandler<E, Q, C> {
const initialiser = createInitialiser(config)
if (handler.fetch) {
const fetcher = unwrap(handler.fetch) as FetchHandler
handler.fetch = createFetchHandler(fetcher, initialiser)
}
if (handler.queue) {
const queuer = unwrap(handler.queue) as QueueHandler
handler.queue = createQueueHandler(queuer, initialiser)
}
return handler
}
export function instrumentDO(doClass: DOClass, config: ConfigurationOption) {
const initialiser = createInitialiser(config)
return instrumentDOClass(doClass, initialiser)
}
export { waitUntilTrace } from './instrumentation/fetch.js'
| src/sdk.ts | evanderkoogh-otel-cf-workers-18fd741 | [
{
"filename": "src/types.ts",
"retrieved_chunk": "\theadSampler?: HS\n\ttailSampler?: TailSampleFn\n}\nexport interface TraceConfig<EC extends ExporterConfig = ExporterConfig> {\n\texporter: EC\n\thandlers?: HandlerConfig\n\tfetch?: FetcherConfig\n\tpostProcessor?: PostProcessorFn\n\tsampling?: SamplingConfig\n\tservice: ServiceConfig",
"score": 40.71626468966795
},
{
"filename": "src/spanprocessor.ts",
"retrieved_chunk": "}\nfunction startExport(currentState: TraceCompleteState, { args }: StartExportAction): ExportingState | DoneState {\n\tconst { exporter, tailSampler, postProcessor } = args\n\tconst { traceId, localRootSpan, completedSpans: spans } = currentState\n\tconst shouldExport = tailSampler({ traceId, localRootSpan, spans })\n\tif (shouldExport) {\n\t\tconst exportSpans = postProcessor(spans)\n\t\tconst promise = new Promise<ExportResult>((resolve) => {\n\t\t\texporter.export(exportSpans, resolve)\n\t\t})",
"score": 30.461828898463366
},
{
"filename": "src/spanprocessor.ts",
"retrieved_chunk": "\t\t\tthis.traces.set(traceId, newState)\n\t\t}\n\t\treturn newState\n\t}\n\tprivate export(traceId: string) {\n\t\tconst { exporter, sampling, postProcessor } = getActiveConfig()\n\t\tconst exportArgs = { exporter, tailSampler: sampling.tailSampler, postProcessor }\n\t\tconst newState = this.action(traceId, { actionName: 'startExport', args: exportArgs })\n\t\tif (newState.stateName === 'exporting') {\n\t\t\tconst promise = newState.promise",
"score": 28.133884215138043
},
{
"filename": "src/spanprocessor.ts",
"retrieved_chunk": "type StartExportArguments = {\n\texporter: SpanExporter\n\ttailSampler: TailSampleFn\n\tpostProcessor: PostProcessorFn\n}\ntype StartSpanAction = Action<'startSpan', { span: Span }>\ntype EndSpanAction = Action<'endSpan', { span: ReadableSpan }>\ntype StartExportAction = Action<'startExport', { args: StartExportArguments }>\nfunction newTrace(currentState: InitialState, { span }: StartSpanAction): InProgressTraceState {\n\tconst spanId = span.spanContext().spanId",
"score": 16.242550937116572
},
{
"filename": "src/types.ts",
"retrieved_chunk": "import { ReadableSpan, Sampler, SpanExporter } from '@opentelemetry/sdk-trace-base'\nimport { OTLPExporterConfig } from './exporter.js'\nimport { FetchHandlerConfig, FetcherConfig } from './instrumentation/fetch.js'\nimport { TailSampleFn } from './sampling.js'\nexport type PostProcessorFn = (spans: ReadableSpan[]) => ReadableSpan[]\nexport type ExporterConfig = OTLPExporterConfig | SpanExporter\nexport interface HandlerConfig {\n\tfetch?: FetchHandlerConfig\n}\nexport interface ServiceConfig {",
"score": 15.773578627838576
}
] | typescript | ([isHeadSampled, isRootErrorSpan]),
},
service: supplied.service,
} |
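multiTailSampler above ORs tail samplers over the per-trace {traceId, localRootSpan, spans} object the span processor passes in. A sketch of composing a custom tail sampler with the built-in ones; the error check is illustrative:

import { SpanStatusCode } from '@opentelemetry/api'
import { multiTailSampler, isHeadSampled, isRootErrorSpan } from './sampling.js'
import type { TailSampleFn } from './sampling.js'

// Also keep any trace in which at least one completed span recorded an error status.
const anySpanErrored: TailSampleFn = (traceInfo) => {
	return traceInfo.spans.some((span) => span.status.code === SpanStatusCode.ERROR)
}

// Plug into a TraceConfig as sampling: { tailSampler }.
export const tailSampler = multiTailSampler([isHeadSampled, isRootErrorSpan, anySpanErrored])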
import { createExportTraceServiceRequest } from '@opentelemetry/otlp-transformer'
import { ExportServiceError, OTLPExporterError } from '@opentelemetry/otlp-exporter-base'
import { ExportResult, ExportResultCode } from '@opentelemetry/core'
import { SpanExporter } from '@opentelemetry/sdk-trace-base'
import { unwrap } from './wrap.js'
export interface OTLPExporterConfig {
url: string
headers?: Record<string, string>
}
const defaultHeaders: Record<string, string> = {
accept: 'application/json',
'content-type': 'application/json',
}
export class OTLPExporter implements SpanExporter {
private headers: Record<string, string>
private url: string
constructor(config: OTLPExporterConfig) {
this.url = config.url
this.headers = Object.assign({}, defaultHeaders, config.headers)
}
export(items: any[], resultCallback: (result: ExportResult) => void): void {
this._export(items)
.then(() => {
resultCallback({ code: ExportResultCode.SUCCESS })
})
.catch((error: ExportServiceError) => {
resultCallback({ code: ExportResultCode.FAILED, error })
})
}
private _export(items: any[]): Promise<unknown> {
return new Promise<void>((resolve, reject) => {
try {
this.send(items, resolve, reject)
} catch (e) {
reject(e)
}
})
}
send(items: any[], onSuccess: () => void, onError: (error: OTLPExporterError) => void): void {
const exportMessage = createExportTraceServiceRequest(items, true)
const body = JSON.stringify(exportMessage)
const params: RequestInit = {
method: 'POST',
headers: this.headers,
body,
}
unwrap(fetch)(this.url, params)
.then((response) => {
if (response.ok) {
onSuccess()
} else {
onError(new OTLPExporterError(`Exporter received a statusCode: ${response.status}`))
}
})
			.catch((error) => {
onError(new OTLPExporterError(`Exception during export: ${error.toString()}`, error.code, error.stack))
})
}
async shutdown(): Promise<void> {}
}
| src/exporter.ts | evanderkoogh-otel-cf-workers-18fd741 | [
{
"filename": "src/instrumentation/fetch.ts",
"retrieved_chunk": "\tconst promise = tracer.startActiveSpan('fetchHandler', options, spanContext, async (span) => {\n\t\ttry {\n\t\t\tconst response: Response = await fetchFn(request, env, ctx)\n\t\t\tif (response.status < 500) {\n\t\t\t\tspan.setStatus({ code: SpanStatusCode.OK })\n\t\t\t}\n\t\t\tspan.setAttributes(gatherResponseAttributes(response))\n\t\t\tspan.end()\n\t\t\treturn response\n\t\t} catch (error) {",
"score": 29.69708675955026
},
{
"filename": "src/instrumentation/do.ts",
"retrieved_chunk": "\tconst name = id.name || ''\n\tconst promise = tracer.startActiveSpan(`do.fetchHandler:${name}`, options, spanContext, async (span) => {\n\t\ttry {\n\t\t\tconst response: Response = await fetchFn(request)\n\t\t\tif (response.ok) {\n\t\t\t\tspan.setStatus({ code: SpanStatusCode.OK })\n\t\t\t}\n\t\t\tspan.setAttributes(gatherResponseAttributes(response))\n\t\t\tspan.end()\n\t\t\treturn response",
"score": 25.29098849766487
},
{
"filename": "src/instrumentation/fetch.ts",
"retrieved_chunk": "export function gatherResponseAttributes(response: Response): Attributes {\n\tconst attrs: Record<string, string | number> = {}\n\tattrs[SemanticAttributes.HTTP_STATUS_CODE] = response.status\n\tattrs[SemanticAttributes.HTTP_RESPONSE_CONTENT_LENGTH] = response.headers.get('content-length')!\n\tattrs['http.response_content-type'] = response.headers.get('content-type')!\n\treturn attrs\n}\nexport function gatherIncomingCfAttributes(request: Request): Attributes {\n\tconst attrs: Record<string, string | number> = {}\n\tattrs[SemanticAttributes.HTTP_SCHEME] = request.cf?.httpProtocol as string",
"score": 20.85075702271674
},
{
"filename": "src/instrumentation/fetch.ts",
"retrieved_chunk": "\t\t\t\t\t\tset: (h, k, v) => h.set(k, typeof v === 'string' ? v : String(v)),\n\t\t\t\t\t})\n\t\t\t\t}\n\t\t\t\tspan.setAttributes(gatherRequestAttributes(request))\n\t\t\t\tif (request.cf) span.setAttributes(gatherOutgoingCfAttributes(request.cf))\n\t\t\t\tconst response: Response = await Reflect.apply(target, thisArg, [request])\n\t\t\t\tspan.setAttributes(gatherResponseAttributes(response))\n\t\t\t\tspan.end()\n\t\t\t\treturn response\n\t\t\t})",
"score": 18.365124503417718
},
{
"filename": "src/spanprocessor.ts",
"retrieved_chunk": "\t\t\tthis.inprogressExports.set(traceId, promise)\n\t\t\tpromise.then((result) => {\n\t\t\t\tif (result.code === ExportResultCode.FAILED) {\n\t\t\t\t\tconsole.log('Error sending spans to exporter:', result.error)\n\t\t\t\t}\n\t\t\t\tthis.action(traceId, { actionName: 'exportDone' })\n\t\t\t\tthis.inprogressExports.delete(traceId)\n\t\t\t})\n\t\t}\n\t}",
"score": 9.92182888378995
}
] | typescript | .catch((error) => { |
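exporter.ts above posts an OTLP/HTTP JSON payload. A sketch of constructing the exporter directly and handing it to the SDK, which parseConfig() passes through untouched because it already implements SpanExporter; URL and header values are hypothetical:

import { OTLPExporter } from './exporter.js'
import type { TraceConfig } from './types.js'

const exporter = new OTLPExporter({
	url: 'https://otlp.example.com/v1/traces',
	headers: { authorization: 'Bearer REPLACE_ME' },
})

export const config: TraceConfig = {
	exporter, // isSpanExporter() sees the export() method and skips re-wrapping
	service: { name: 'exporter-demo' },
}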
import { propagation } from '@opentelemetry/api'
import { W3CTraceContextPropagator } from '@opentelemetry/core'
import { Resource } from '@opentelemetry/resources'
import { SemanticResourceAttributes } from '@opentelemetry/semantic-conventions'
import {
AlwaysOnSampler,
ParentBasedSampler,
ReadableSpan,
Sampler,
SpanExporter,
TraceIdRatioBasedSampler,
} from '@opentelemetry/sdk-trace-base'
import { Initialiser } from './config.js'
import { OTLPExporter } from './exporter.js'
import { WorkerTracerProvider } from './provider.js'
import { isHeadSampled, isRootErrorSpan, multiTailSampler } from './sampling.js'
import { BatchTraceSpanProcessor } from './spanprocessor.js'
import { Trigger, TraceConfig, ResolvedTraceConfig, ExporterConfig, ParentRatioSamplingConfig } from './types.js'
import { unwrap } from './wrap.js'
import { createFetchHandler, instrumentGlobalFetch } from './instrumentation/fetch.js'
import { instrumentGlobalCache } from './instrumentation/cache.js'
import { createQueueHandler } from './instrumentation/queue.js'
import { DOClass, instrumentDOClass } from './instrumentation/do.js'
type FetchHandler = ExportedHandlerFetchHandler<unknown, unknown>
type QueueHandler = ExportedHandlerQueueHandler
export type ResolveConfigFn = (env: any, trigger: Trigger) => TraceConfig
export type ConfigurationOption = TraceConfig | ResolveConfigFn
export function isRequest(trigger: Trigger): trigger is Request {
return trigger instanceof Request
}
export function isMessageBatch(trigger: Trigger): trigger is MessageBatch {
return !!(trigger as MessageBatch).ackAll
}
export function isAlarm(trigger: Trigger): trigger is 'do-alarm' {
return trigger === 'do-alarm'
}
const createResource = (config: ResolvedTraceConfig): Resource => {
const workerResourceAttrs = {
[SemanticResourceAttributes.CLOUD_PROVIDER]: 'cloudflare',
[SemanticResourceAttributes.CLOUD_PLATFORM]: 'cloudflare.workers',
[SemanticResourceAttributes.CLOUD_REGION]: 'earth',
// [SemanticResourceAttributes.FAAS_NAME]: '//TODO',
// [SemanticResourceAttributes.FAAS_VERSION]: '//TODO',
[SemanticResourceAttributes.FAAS_MAX_MEMORY]: 128,
[SemanticResourceAttributes.TELEMETRY_SDK_LANGUAGE]: 'JavaScript',
[SemanticResourceAttributes.TELEMETRY_SDK_NAME]: '@microlabs/otel-workers-sdk',
}
const serviceResource = new Resource({
[SemanticResourceAttributes.SERVICE_NAME]: config.service.name,
[SemanticResourceAttributes.SERVICE_NAMESPACE]: config.service.namespace,
[SemanticResourceAttributes.SERVICE_VERSION]: config.service.version,
})
const resource = new Resource(workerResourceAttrs)
return resource.merge(serviceResource)
}
function isSpanExporter(exporterConfig: ExporterConfig): exporterConfig is SpanExporter {
return !!(exporterConfig as SpanExporter).export
}
let initialised = false
function init(config: ResolvedTraceConfig): void {
if (!initialised) {
instrumentGlobalCache()
instrumentGlobalFetch()
propagation.setGlobalPropagator(new W3CTraceContextPropagator())
const resource = createResource(config)
const spanProcessor = new BatchTraceSpanProcessor()
const provider = new WorkerTracerProvider(spanProcessor, resource)
provider.register()
initialised = true
}
}
function isSampler(sampler: Sampler | ParentRatioSamplingConfig): sampler is Sampler {
return !!(sampler as Sampler).shouldSample
}
function createSampler(conf: ParentRatioSamplingConfig): Sampler {
const ratioSampler = new TraceIdRatioBasedSampler(conf.ratio)
if (typeof conf.acceptRemote === 'boolean' && !conf.acceptRemote) {
return new ParentBasedSampler({
root: ratioSampler,
remoteParentSampled: ratioSampler,
remoteParentNotSampled: ratioSampler,
})
} else {
return new ParentBasedSampler({ root: ratioSampler })
}
}
function parseConfig(supplied: TraceConfig): ResolvedTraceConfig {
const headSampleConf = supplied.sampling?.headSampler
const headSampler = headSampleConf
? isSampler(headSampleConf)
? headSampleConf
: createSampler(headSampleConf)
: new AlwaysOnSampler()
return {
exporter: isSpanExporter(supplied.exporter) ? supplied.exporter : new OTLPExporter(supplied.exporter),
fetch: {
includeTraceContext: supplied.fetch?.includeTraceContext ?? true,
},
handlers: {
fetch: {
acceptTraceContext: supplied.handlers?.fetch?.acceptTraceContext ?? true,
},
},
postProcessor: supplied.postProcessor || ((spans: ReadableSpan[]) => spans),
sampling: {
headSampler,
tailSampler: supplied.sampling?.tailSampler || multiTailSampler([isHeadSampled, isRootErrorSpan]),
},
service: supplied.service,
}
}
function createInitialiser(config: ConfigurationOption): Initialiser {
if (typeof config === 'function') {
		return (env, trigger) => {
const conf = parseConfig(config(env, trigger))
init(conf)
return conf
}
} else {
return () => {
const conf = parseConfig(config)
init(conf)
return conf
}
}
}
export function instrument<E, Q, C>(
handler: ExportedHandler<E, Q, C>,
config: ConfigurationOption
): ExportedHandler<E, Q, C> {
const initialiser = createInitialiser(config)
if (handler.fetch) {
const fetcher = unwrap(handler.fetch) as FetchHandler
handler.fetch = createFetchHandler(fetcher, initialiser)
}
if (handler.queue) {
const queuer = unwrap(handler.queue) as QueueHandler
handler.queue = createQueueHandler(queuer, initialiser)
}
return handler
}
export function instrumentDO(doClass: DOClass, config: ConfigurationOption) {
const initialiser = createInitialiser(config)
return instrumentDOClass(doClass, initialiser)
}
export { waitUntilTrace } from './instrumentation/fetch.js'
| src/sdk.ts | evanderkoogh-otel-cf-workers-18fd741 | [
{
"filename": "src/types.ts",
"retrieved_chunk": "\theadSampler?: HS\n\ttailSampler?: TailSampleFn\n}\nexport interface TraceConfig<EC extends ExporterConfig = ExporterConfig> {\n\texporter: EC\n\thandlers?: HandlerConfig\n\tfetch?: FetcherConfig\n\tpostProcessor?: PostProcessorFn\n\tsampling?: SamplingConfig\n\tservice: ServiceConfig",
"score": 28.779998699570587
},
{
"filename": "src/config.ts",
"retrieved_chunk": "import { context } from '@opentelemetry/api'\nimport { ResolvedTraceConfig, Trigger } from './types.js'\nconst configSymbol = Symbol('Otel Workers Tracing Configuration')\nexport type Initialiser = (env: Record<string, unknown>, trigger: Trigger) => ResolvedTraceConfig\nexport function setConfig(config: ResolvedTraceConfig, ctx = context.active()) {\n\treturn ctx.setValue(configSymbol, config)\n}\nexport function getActiveConfig(): ResolvedTraceConfig {\n\tconst config = context.active().getValue(configSymbol) as ResolvedTraceConfig\n\treturn config",
"score": 17.362506022347596
},
{
"filename": "src/spanprocessor.ts",
"retrieved_chunk": "\t\t\tthis.traces.set(traceId, newState)\n\t\t}\n\t\treturn newState\n\t}\n\tprivate export(traceId: string) {\n\t\tconst { exporter, sampling, postProcessor } = getActiveConfig()\n\t\tconst exportArgs = { exporter, tailSampler: sampling.tailSampler, postProcessor }\n\t\tconst newState = this.action(traceId, { actionName: 'startExport', args: exportArgs })\n\t\tif (newState.stateName === 'exporting') {\n\t\t\tconst promise = newState.promise",
"score": 15.935594662044062
},
{
"filename": "src/spanprocessor.ts",
"retrieved_chunk": "}\nfunction startExport(currentState: TraceCompleteState, { args }: StartExportAction): ExportingState | DoneState {\n\tconst { exporter, tailSampler, postProcessor } = args\n\tconst { traceId, localRootSpan, completedSpans: spans } = currentState\n\tconst shouldExport = tailSampler({ traceId, localRootSpan, spans })\n\tif (shouldExport) {\n\t\tconst exportSpans = postProcessor(spans)\n\t\tconst promise = new Promise<ExportResult>((resolve) => {\n\t\t\texporter.export(exportSpans, resolve)\n\t\t})",
"score": 11.852465297256208
},
{
"filename": "src/instrumentation/fetch.ts",
"retrieved_chunk": "\t\t\treturn promise\n\t\t},\n\t}\n\treturn wrap(fetchFn, handler, true)\n}\nexport function instrumentGlobalFetch(): void {\n\tglobalThis.fetch = instrumentFetcher(globalThis.fetch, (config) => config.fetch)\n}",
"score": 11.212282500291755
}
] | typescript | trigger) => { |
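createInitialiser above accepts either a static TraceConfig or a ResolveConfigFn. A sketch of resolving the exporter from Worker bindings per trigger; the binding names and service name are hypothetical:

import { instrument } from './sdk.js'
import type { ResolveConfigFn } from './sdk.js'

interface Env {
	OTLP_URL: string
	OTLP_AUTH: string
}

// Called once per trigger with the live env; init() only performs the global setup the first time.
const resolveConfig: ResolveConfigFn = (env: Env, _trigger) => ({
	exporter: { url: env.OTLP_URL, headers: { authorization: env.OTLP_AUTH } },
	service: { name: 'env-config-demo' },
})

const handler = {
	async fetch(): Promise<Response> {
		return new Response('ok')
	},
}

export default instrument(handler, resolveConfig)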
import { propagation } from '@opentelemetry/api'
import { W3CTraceContextPropagator } from '@opentelemetry/core'
import { Resource } from '@opentelemetry/resources'
import { SemanticResourceAttributes } from '@opentelemetry/semantic-conventions'
import {
AlwaysOnSampler,
ParentBasedSampler,
ReadableSpan,
Sampler,
SpanExporter,
TraceIdRatioBasedSampler,
} from '@opentelemetry/sdk-trace-base'
import { Initialiser } from './config.js'
import { OTLPExporter } from './exporter.js'
import { WorkerTracerProvider } from './provider.js'
import { isHeadSampled, isRootErrorSpan, multiTailSampler } from './sampling.js'
import { BatchTraceSpanProcessor } from './spanprocessor.js'
import { Trigger, TraceConfig, ResolvedTraceConfig, ExporterConfig, ParentRatioSamplingConfig } from './types.js'
import { unwrap } from './wrap.js'
import { createFetchHandler, instrumentGlobalFetch } from './instrumentation/fetch.js'
import { instrumentGlobalCache } from './instrumentation/cache.js'
import { createQueueHandler } from './instrumentation/queue.js'
import { DOClass, instrumentDOClass } from './instrumentation/do.js'
type FetchHandler = ExportedHandlerFetchHandler<unknown, unknown>
type QueueHandler = ExportedHandlerQueueHandler
export type ResolveConfigFn = (env: any, trigger: Trigger) => TraceConfig
export type ConfigurationOption = TraceConfig | ResolveConfigFn
export function isRequest(trigger: Trigger): trigger is Request {
return trigger instanceof Request
}
export function isMessageBatch(trigger: Trigger): trigger is MessageBatch {
return !!(trigger as MessageBatch).ackAll
}
export function isAlarm(trigger: Trigger): trigger is 'do-alarm' {
return trigger === 'do-alarm'
}
const createResource = (config: ResolvedTraceConfig): Resource => {
const workerResourceAttrs = {
[SemanticResourceAttributes.CLOUD_PROVIDER]: 'cloudflare',
[SemanticResourceAttributes.CLOUD_PLATFORM]: 'cloudflare.workers',
[SemanticResourceAttributes.CLOUD_REGION]: 'earth',
// [SemanticResourceAttributes.FAAS_NAME]: '//TODO',
// [SemanticResourceAttributes.FAAS_VERSION]: '//TODO',
[SemanticResourceAttributes.FAAS_MAX_MEMORY]: 128,
[SemanticResourceAttributes.TELEMETRY_SDK_LANGUAGE]: 'JavaScript',
[SemanticResourceAttributes.TELEMETRY_SDK_NAME]: '@microlabs/otel-workers-sdk',
}
const serviceResource = new Resource({
[SemanticResourceAttributes.SERVICE_NAME]: config.service.name,
[SemanticResourceAttributes.SERVICE_NAMESPACE]: config.service.namespace,
[SemanticResourceAttributes.SERVICE_VERSION]: config.service.version,
})
const resource = new Resource(workerResourceAttrs)
return resource.merge(serviceResource)
}
function isSpanExporter(exporterConfig: ExporterConfig): exporterConfig is SpanExporter {
return !!(exporterConfig as SpanExporter).export
}
let initialised = false
function init(config: ResolvedTraceConfig): void {
if (!initialised) {
instrumentGlobalCache()
instrumentGlobalFetch()
propagation.setGlobalPropagator(new W3CTraceContextPropagator())
const resource = createResource(config)
const spanProcessor = new BatchTraceSpanProcessor()
const provider = new WorkerTracerProvider(spanProcessor, resource)
provider.register()
initialised = true
}
}
function isSampler(sampler: Sampler | ParentRatioSamplingConfig): sampler is Sampler {
return !!(sampler as Sampler).shouldSample
}
function createSampler(conf: ParentRatioSamplingConfig): Sampler {
const ratioSampler = new TraceIdRatioBasedSampler(conf.ratio)
if (typeof conf.acceptRemote === 'boolean' && !conf.acceptRemote) {
return new ParentBasedSampler({
root: ratioSampler,
remoteParentSampled: ratioSampler,
remoteParentNotSampled: ratioSampler,
})
} else {
return new ParentBasedSampler({ root: ratioSampler })
}
}
function parseConfig(supplied: TraceConfig): ResolvedTraceConfig {
const headSampleConf = supplied.sampling?.headSampler
const headSampler = headSampleConf
? isSampler(headSampleConf)
? headSampleConf
: createSampler(headSampleConf)
: new AlwaysOnSampler()
return {
exporter: isSpanExporter(supplied.exporter) ? supplied.exporter : new OTLPExporter(supplied.exporter),
fetch: {
includeTraceContext: supplied.fetch?.includeTraceContext ?? true,
},
handlers: {
fetch: {
acceptTraceContext: supplied.handlers?.fetch?.acceptTraceContext ?? true,
},
},
		postProcessor: supplied.postProcessor || ((spans: ReadableSpan[]) => spans),
sampling: { |
headSampler,
tailSampler: supplied.sampling?.tailSampler || multiTailSampler([isHeadSampled, isRootErrorSpan]),
},
service: supplied.service,
}
}
function createInitialiser(config: ConfigurationOption): Initialiser {
if (typeof config === 'function') {
return (env, trigger) => {
const conf = parseConfig(config(env, trigger))
init(conf)
return conf
}
} else {
return () => {
const conf = parseConfig(config)
init(conf)
return conf
}
}
}
export function instrument<E, Q, C>(
handler: ExportedHandler<E, Q, C>,
config: ConfigurationOption
): ExportedHandler<E, Q, C> {
const initialiser = createInitialiser(config)
if (handler.fetch) {
const fetcher = unwrap(handler.fetch) as FetchHandler
handler.fetch = createFetchHandler(fetcher, initialiser)
}
if (handler.queue) {
const queuer = unwrap(handler.queue) as QueueHandler
handler.queue = createQueueHandler(queuer, initialiser)
}
return handler
}
export function instrumentDO(doClass: DOClass, config: ConfigurationOption) {
const initialiser = createInitialiser(config)
return instrumentDOClass(doClass, initialiser)
}
export { waitUntilTrace } from './instrumentation/fetch.js'
| src/sdk.ts | evanderkoogh-otel-cf-workers-18fd741 | [
{
"filename": "src/instrumentation/fetch.ts",
"retrieved_chunk": "\tconst workerConfig = getActiveConfig()\n\tconst acceptTraceContext =\n\t\ttypeof workerConfig.handlers.fetch.acceptTraceContext === 'function'\n\t\t\t? workerConfig.handlers.fetch.acceptTraceContext(request)\n\t\t\t: workerConfig.handlers.fetch.acceptTraceContext ?? true\n\treturn acceptTraceContext ? getParentContextFromHeaders(request.headers) : api_context.active()\n}\nexport function waitUntilTrace(fn: () => Promise<any>): Promise<void> {\n\tconst tracer = trace.getTracer('waitUntil')\n\treturn tracer.startActiveSpan('waitUntil', async (span) => {",
"score": 47.10833163330941
},
{
"filename": "src/types.ts",
"retrieved_chunk": "\theadSampler?: HS\n\ttailSampler?: TailSampleFn\n}\nexport interface TraceConfig<EC extends ExporterConfig = ExporterConfig> {\n\texporter: EC\n\thandlers?: HandlerConfig\n\tfetch?: FetcherConfig\n\tpostProcessor?: PostProcessorFn\n\tsampling?: SamplingConfig\n\tservice: ServiceConfig",
"score": 32.85923738005539
},
{
"filename": "src/types.ts",
"retrieved_chunk": "}\nexport interface ResolvedTraceConfig extends TraceConfig {\n\texporter: SpanExporter\n\thandlers: Required<HandlerConfig>\n\tfetch: Required<FetcherConfig>\n\tpostProcessor: PostProcessorFn\n\tsampling: Required<SamplingConfig<Sampler>>\n}\nexport interface DOConstructorTrigger {\n\tid: string",
"score": 32.36064358389129
},
{
"filename": "src/instrumentation/fetch.ts",
"retrieved_chunk": "\t\t\tconst options: SpanOptions = { kind: SpanKind.CLIENT, attributes: attrs }\n\t\t\tconst host = new URL(request.url).host\n\t\t\tconst spanName = typeof attrs?.['name'] === 'string' ? attrs?.['name'] : `fetch: ${host}`\n\t\t\tconst promise = tracer.startActiveSpan(spanName, options, async (span) => {\n\t\t\t\tconst includeTraceContext =\n\t\t\t\t\ttypeof config.includeTraceContext === 'function'\n\t\t\t\t\t\t? config.includeTraceContext(request)\n\t\t\t\t\t\t: config.includeTraceContext\n\t\t\t\tif (includeTraceContext ?? true) {\n\t\t\t\t\tpropagation.inject(api_context.active(), request.headers, {",
"score": 25.96570277735554
},
{
"filename": "src/types.ts",
"retrieved_chunk": "import { ReadableSpan, Sampler, SpanExporter } from '@opentelemetry/sdk-trace-base'\nimport { OTLPExporterConfig } from './exporter.js'\nimport { FetchHandlerConfig, FetcherConfig } from './instrumentation/fetch.js'\nimport { TailSampleFn } from './sampling.js'\nexport type PostProcessorFn = (spans: ReadableSpan[]) => ReadableSpan[]\nexport type ExporterConfig = OTLPExporterConfig | SpanExporter\nexport interface HandlerConfig {\n\tfetch?: FetchHandlerConfig\n}\nexport interface ServiceConfig {",
"score": 25.848110729048457
}
] | typescript | postProcessor: supplied.postProcessor || ((spans: ReadableSpan[]) => spans),
sampling: { |
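parseConfig above defaults postProcessor to the identity function. A sketch of a post processor that drops spans just before export; the span-name prefix is illustrative:

import type { ReadableSpan } from '@opentelemetry/sdk-trace-base'
import type { PostProcessorFn } from './types.js'

// Runs on the completed, tail-sampled batch right before the exporter is called.
export const postProcessor: PostProcessorFn = (spans: ReadableSpan[]) =>
	spans.filter((span) => !span.name.startsWith('cache:'))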
import { context as api_context, trace, SpanOptions, SpanKind, Exception, SpanStatusCode } from '@opentelemetry/api'
import { SemanticAttributes } from '@opentelemetry/semantic-conventions'
import { passthroughGet, unwrap, wrap } from '../wrap.js'
import {
getParentContextFromHeaders,
gatherIncomingCfAttributes,
gatherRequestAttributes,
gatherResponseAttributes,
instrumentFetcher,
} from './fetch.js'
import { instrumentEnv } from './env.js'
import { Initialiser, setConfig } from '../config.js'
import { exportSpans } from './common.js'
import { instrumentStorage } from './do-storage.js'
import { DOConstructorTrigger } from '../types.js'
type FetchFn = DurableObject['fetch']
type AlarmFn = DurableObject['alarm']
type Env = Record<string, unknown>
function instrumentBindingStub(stub: DurableObjectStub, nsName: string): DurableObjectStub {
const stubHandler: ProxyHandler<typeof stub> = {
get(target, prop) {
if (prop === 'fetch') {
const fetcher = Reflect.get(target, prop)
const attrs = {
name: `durable_object:${nsName}`,
'do.namespace': nsName,
'do.id': target.id.toString(),
'do.id.name': target.id.name,
}
return instrumentFetcher(fetcher, () => ({ includeTraceContext: true }), attrs)
} else {
return passthroughGet(target, prop)
}
},
}
return wrap(stub, stubHandler)
}
function instrumentBindingGet(getFn: DurableObjectNamespace['get'], nsName: string): DurableObjectNamespace['get'] {
const getHandler: ProxyHandler<DurableObjectNamespace['get']> = {
apply(target, thisArg, argArray) {
const stub: DurableObjectStub = Reflect.apply(target, thisArg, argArray)
return instrumentBindingStub(stub, nsName)
},
}
return wrap(getFn, getHandler)
}
export function instrumentDOBinding(ns: DurableObjectNamespace, nsName: string) {
const nsHandler: ProxyHandler<typeof ns> = {
get(target, prop) {
if (prop === 'get') {
const fn = Reflect.get(ns, prop)
return instrumentBindingGet(fn, nsName)
} else {
return passthroughGet(target, prop)
}
},
}
return wrap(ns, nsHandler)
}
export function instrumentState(state: DurableObjectState) {
const stateHandler: ProxyHandler<DurableObjectState> = {
get(target, prop, receiver) {
const result = Reflect.get(target, prop, unwrap(receiver))
if (prop === 'storage') {
return instrumentStorage(result)
} else if (typeof result === 'function') {
return result.bind(target)
} else {
return result
}
},
}
return wrap(state, stateHandler)
}
let cold_start = true
export type DOClass = { new (state: DurableObjectState, env: any): DurableObject }
export function executeDOFetch(fetchFn: FetchFn, request: Request, id: DurableObjectId): Promise<Response> {
const spanContext = getParentContextFromHeaders(request.headers)
const tracer = trace.getTracer('DO fetchHandler')
const attributes = {
[SemanticAttributes.FAAS_TRIGGER]: 'http',
[SemanticAttributes.FAAS_COLDSTART]: cold_start,
}
cold_start = false
Object.assign(attributes, gatherRequestAttributes(request))
Object.assign(attributes, gatherIncomingCfAttributes(request))
const options: SpanOptions = {
attributes,
kind: SpanKind.SERVER,
}
const name = id.name || ''
const promise = tracer.startActiveSpan(`do.fetchHandler:${name}`, options, spanContext, async (span) => {
try {
const response: Response = await fetchFn(request)
if (response.ok) {
span.setStatus({ code: SpanStatusCode.OK })
}
span.setAttributes(gatherResponseAttributes(response))
span.end()
return response
} catch (error) {
span.recordException(error as Exception)
span.setStatus({ code: SpanStatusCode.ERROR })
span.end()
throw error
}
})
return promise
}
export function executeDOAlarm(alarmFn: NonNullable<AlarmFn>, id: DurableObjectId): Promise<void> {
const tracer = trace.getTracer('DO alarmHandler')
const name = id.name || ''
const promise = tracer.startActiveSpan(`do.alarmHandler:${name}`, async (span) => {
span.setAttribute(SemanticAttributes.FAAS_COLDSTART, cold_start)
cold_start = false
span.setAttribute('do.id', id.toString())
if (id.name) span.setAttribute('do.name', id.name)
try {
await alarmFn()
span.end()
} catch (error) {
span.recordException(error as Exception)
span.setStatus({ code: SpanStatusCode.ERROR })
span.end()
throw error
}
})
return promise
}
function instrumentFetchFn(fetchFn: FetchFn, initialiser: Initialiser, env: Env, id: DurableObjectId): FetchFn {
const fetchHandler: ProxyHandler<FetchFn> = {
async apply(target, thisArg, argArray: Parameters<FetchFn>) {
const request = argArray[0]
const config = initialiser(env, request)
const context = setConfig(config)
try {
const bound = target.bind(unwrap(thisArg))
return await api_context.with(context, executeDOFetch, undefined, bound, request, id)
} catch (error) {
throw error
} finally {
exportSpans()
}
},
}
return wrap(fetchFn, fetchHandler)
}
function instrumentAlarmFn(alarmFn: AlarmFn, initialiser: Initialiser, env: Env, id: DurableObjectId) {
if (!alarmFn) return undefined
const alarmHandler: ProxyHandler<NonNullable<AlarmFn>> = {
async apply(target, thisArg) {
const config = initialiser(env, 'do-alarm')
const context = setConfig(config)
try {
const bound = target.bind(unwrap(thisArg))
return await api_context.with(context, executeDOAlarm, undefined, bound, id)
} catch (error) {
throw error
} finally {
exportSpans()
}
},
}
return wrap(alarmFn, alarmHandler)
}
function instrumentDurableObject(doObj: DurableObject, initialiser: Initialiser, env: Env, state: DurableObjectState) {
const objHandler: ProxyHandler<DurableObject> = {
get(target, prop) {
if (prop === 'fetch') {
const fetchFn = Reflect.get(target, prop)
return instrumentFetchFn(fetchFn, initialiser, env, state.id)
} else if (prop === 'alarm') {
const alarmFn = Reflect.get(target, prop)
return instrumentAlarmFn(alarmFn, initialiser, env, state.id)
} else {
const result = Reflect.get(target, prop)
if (typeof result === 'function') {
					// return the bound method so `this` keeps pointing at the original Durable Object
					return result.bind(doObj)
}
return result
}
},
}
return wrap(doObj, objHandler)
}
export function instrumentDOClass(doClass: DOClass, initialiser: Initialiser): DOClass {
const classHandler: ProxyHandler<DOClass> = {
construct(target, [orig_state, orig_env]: ConstructorParameters<DOClass>) {
			const trigger: DOConstructorTrigger = {
id: orig_state.id.toString(),
name: orig_state.id.name,
}
const constructorConfig = initialiser(orig_env, trigger)
const context = setConfig(constructorConfig)
const state = instrumentState(orig_state)
const env = instrumentEnv(orig_env)
const createDO = () => {
return new target(state, env)
}
const doObj = api_context.with(context, createDO)
return instrumentDurableObject(doObj, initialiser, env, state)
},
}
return wrap(doClass, classHandler)
}
| src/instrumentation/do.ts | evanderkoogh-otel-cf-workers-18fd741 | [
{
"filename": "src/sdk.ts",
"retrieved_chunk": "}\nexport function instrumentDO(doClass: DOClass, config: ConfigurationOption) {\n\tconst initialiser = createInitialiser(config)\n\treturn instrumentDOClass(doClass, initialiser)\n}\nexport { waitUntilTrace } from './instrumentation/fetch.js'",
"score": 46.28546438137382
},
{
"filename": "src/sdk.ts",
"retrieved_chunk": "import { instrumentGlobalCache } from './instrumentation/cache.js'\nimport { createQueueHandler } from './instrumentation/queue.js'\nimport { DOClass, instrumentDOClass } from './instrumentation/do.js'\ntype FetchHandler = ExportedHandlerFetchHandler<unknown, unknown>\ntype QueueHandler = ExportedHandlerQueueHandler\nexport type ResolveConfigFn = (env: any, trigger: Trigger) => TraceConfig\nexport type ConfigurationOption = TraceConfig | ResolveConfigFn\nexport function isRequest(trigger: Trigger): trigger is Request {\n\treturn trigger instanceof Request\n}",
"score": 26.9013555568447
},
{
"filename": "src/instrumentation/queue.ts",
"retrieved_chunk": "\t\t\tcount.retryRemaining()\n\t\t\tspan.end()\n\t\t\tthrow error\n\t\t}\n\t})\n\treturn promise\n}\nexport function createQueueHandler(queueFn: QueueHandler, initialiser: Initialiser) {\n\tconst queueHandler: ProxyHandler<QueueHandler> = {\n\t\tasync apply(target, _thisArg, argArray: Parameters<QueueHandler>): Promise<void> {",
"score": 16.810326224069296
},
{
"filename": "src/sdk.ts",
"retrieved_chunk": "}\nfunction createInitialiser(config: ConfigurationOption): Initialiser {\n\tif (typeof config === 'function') {\n\t\treturn (env, trigger) => {\n\t\t\tconst conf = parseConfig(config(env, trigger))\n\t\t\tinit(conf)\n\t\t\treturn conf\n\t\t}\n\t} else {\n\t\treturn () => {",
"score": 16.606131993930294
},
{
"filename": "src/instrumentation/fetch.ts",
"retrieved_chunk": "\t\t\tspan.recordException(error as Exception)\n\t\t\tspan.setStatus({ code: SpanStatusCode.ERROR })\n\t\t\tspan.end()\n\t\t\tthrow error\n\t\t}\n\t})\n\treturn promise\n}\nexport function createFetchHandler(fetchFn: FetchHandler, initialiser: Initialiser) {\n\tconst fetchHandler: ProxyHandler<FetchHandler> = {",
"score": 15.679799247711081
}
] | typescript | const trigger: DOConstructorTrigger = { |
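instrumentDOBinding above proxies namespace bindings so stub fetches become CLIENT spans with trace context injected. A sketch of the calling Worker side, assuming instrumentEnv wraps the binding as in the fetch handler shown earlier; the binding, endpoint and service names are hypothetical:

import { instrument } from './sdk.js'

interface Env {
	COUNTER: DurableObjectNamespace
}

const handler = {
	// env arrives already proxied, so COUNTER.get() hands back an instrumented stub
	// whose fetch() injects trace context headers before reaching the object.
	async fetch(request: Request, env: Env): Promise<Response> {
		const id = env.COUNTER.idFromName('global')
		const stub = env.COUNTER.get(id)
		return stub.fetch(request)
	},
}

export default instrument(handler, {
	exporter: { url: 'https://otlp.example.com/v1/traces' },
	service: { name: 'do-client' },
})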
import { context as api_context, trace, SpanOptions, SpanKind, Exception, SpanStatusCode } from '@opentelemetry/api'
import { SemanticAttributes } from '@opentelemetry/semantic-conventions'
import { passthroughGet, unwrap, wrap } from '../wrap.js'
import {
getParentContextFromHeaders,
gatherIncomingCfAttributes,
gatherRequestAttributes,
gatherResponseAttributes,
instrumentFetcher,
} from './fetch.js'
import { instrumentEnv } from './env.js'
import { Initialiser, setConfig } from '../config.js'
import { exportSpans } from './common.js'
import { instrumentStorage } from './do-storage.js'
import { DOConstructorTrigger } from '../types.js'
type FetchFn = DurableObject['fetch']
type AlarmFn = DurableObject['alarm']
type Env = Record<string, unknown>
function instrumentBindingStub(stub: DurableObjectStub, nsName: string): DurableObjectStub {
const stubHandler: ProxyHandler<typeof stub> = {
get(target, prop) {
if (prop === 'fetch') {
const fetcher = Reflect.get(target, prop)
const attrs = {
name: `durable_object:${nsName}`,
'do.namespace': nsName,
'do.id': target.id.toString(),
'do.id.name': target.id.name,
}
return instrumentFetcher(fetcher, () => ({ includeTraceContext: true }), attrs)
} else {
return passthroughGet(target, prop)
}
},
}
return wrap(stub, stubHandler)
}
function instrumentBindingGet(getFn: DurableObjectNamespace['get'], nsName: string): DurableObjectNamespace['get'] {
const getHandler: ProxyHandler<DurableObjectNamespace['get']> = {
apply(target, thisArg, argArray) {
const stub: DurableObjectStub = Reflect.apply(target, thisArg, argArray)
return instrumentBindingStub(stub, nsName)
},
}
return wrap(getFn, getHandler)
}
export function instrumentDOBinding(ns: DurableObjectNamespace, nsName: string) {
const nsHandler: ProxyHandler<typeof ns> = {
get(target, prop) {
if (prop === 'get') {
const fn = Reflect.get(ns, prop)
return instrumentBindingGet(fn, nsName)
} else {
return passthroughGet(target, prop)
}
},
}
return wrap(ns, nsHandler)
}
export function instrumentState(state: DurableObjectState) {
const stateHandler: ProxyHandler<DurableObjectState> = {
get(target, prop, receiver) {
			const result = Reflect.get(target, prop, unwrap(receiver))
if (prop === 'storage') { |
return instrumentStorage(result)
} else if (typeof result === 'function') {
return result.bind(target)
} else {
return result
}
},
}
return wrap(state, stateHandler)
}
let cold_start = true
export type DOClass = { new (state: DurableObjectState, env: any): DurableObject }
export function executeDOFetch(fetchFn: FetchFn, request: Request, id: DurableObjectId): Promise<Response> {
const spanContext = getParentContextFromHeaders(request.headers)
const tracer = trace.getTracer('DO fetchHandler')
const attributes = {
[SemanticAttributes.FAAS_TRIGGER]: 'http',
[SemanticAttributes.FAAS_COLDSTART]: cold_start,
}
cold_start = false
Object.assign(attributes, gatherRequestAttributes(request))
Object.assign(attributes, gatherIncomingCfAttributes(request))
const options: SpanOptions = {
attributes,
kind: SpanKind.SERVER,
}
const name = id.name || ''
const promise = tracer.startActiveSpan(`do.fetchHandler:${name}`, options, spanContext, async (span) => {
try {
const response: Response = await fetchFn(request)
if (response.ok) {
span.setStatus({ code: SpanStatusCode.OK })
}
span.setAttributes(gatherResponseAttributes(response))
span.end()
return response
} catch (error) {
span.recordException(error as Exception)
span.setStatus({ code: SpanStatusCode.ERROR })
span.end()
throw error
}
})
return promise
}
export function executeDOAlarm(alarmFn: NonNullable<AlarmFn>, id: DurableObjectId): Promise<void> {
const tracer = trace.getTracer('DO alarmHandler')
const name = id.name || ''
const promise = tracer.startActiveSpan(`do.alarmHandler:${name}`, async (span) => {
span.setAttribute(SemanticAttributes.FAAS_COLDSTART, cold_start)
cold_start = false
span.setAttribute('do.id', id.toString())
if (id.name) span.setAttribute('do.name', id.name)
try {
await alarmFn()
span.end()
} catch (error) {
span.recordException(error as Exception)
span.setStatus({ code: SpanStatusCode.ERROR })
span.end()
throw error
}
})
return promise
}
function instrumentFetchFn(fetchFn: FetchFn, initialiser: Initialiser, env: Env, id: DurableObjectId): FetchFn {
const fetchHandler: ProxyHandler<FetchFn> = {
async apply(target, thisArg, argArray: Parameters<FetchFn>) {
const request = argArray[0]
const config = initialiser(env, request)
const context = setConfig(config)
try {
const bound = target.bind(unwrap(thisArg))
return await api_context.with(context, executeDOFetch, undefined, bound, request, id)
} catch (error) {
throw error
} finally {
exportSpans()
}
},
}
return wrap(fetchFn, fetchHandler)
}
function instrumentAlarmFn(alarmFn: AlarmFn, initialiser: Initialiser, env: Env, id: DurableObjectId) {
if (!alarmFn) return undefined
const alarmHandler: ProxyHandler<NonNullable<AlarmFn>> = {
async apply(target, thisArg) {
const config = initialiser(env, 'do-alarm')
const context = setConfig(config)
try {
const bound = target.bind(unwrap(thisArg))
return await api_context.with(context, executeDOAlarm, undefined, bound, id)
} catch (error) {
throw error
} finally {
exportSpans()
}
},
}
return wrap(alarmFn, alarmHandler)
}
function instrumentDurableObject(doObj: DurableObject, initialiser: Initialiser, env: Env, state: DurableObjectState) {
const objHandler: ProxyHandler<DurableObject> = {
get(target, prop) {
if (prop === 'fetch') {
const fetchFn = Reflect.get(target, prop)
return instrumentFetchFn(fetchFn, initialiser, env, state.id)
} else if (prop === 'alarm') {
const alarmFn = Reflect.get(target, prop)
return instrumentAlarmFn(alarmFn, initialiser, env, state.id)
} else {
const result = Reflect.get(target, prop)
				if (typeof result === 'function') {
					// return the bound method so callers get `this` pointing at the real DO instance
					return result.bind(doObj)
				}
return result
}
},
}
return wrap(doObj, objHandler)
}
export function instrumentDOClass(doClass: DOClass, initialiser: Initialiser): DOClass {
const classHandler: ProxyHandler<DOClass> = {
construct(target, [orig_state, orig_env]: ConstructorParameters<DOClass>) {
const trigger: DOConstructorTrigger = {
id: orig_state.id.toString(),
name: orig_state.id.name,
}
const constructorConfig = initialiser(orig_env, trigger)
const context = setConfig(constructorConfig)
const state = instrumentState(orig_state)
const env = instrumentEnv(orig_env)
const createDO = () => {
return new target(state, env)
}
const doObj = api_context.with(context, createDO)
return instrumentDurableObject(doObj, initialiser, env, state)
},
}
return wrap(doClass, classHandler)
}
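A usage sketch, not part of the source: wrapping a hypothetical Durable Object class with instrumentDO (the sdk.ts entry point that feeds an Initialiser into instrumentDOClass). The class body, exporter URL and service name below are assumptions.

import { instrumentDO } from '../sdk.js'

class Counter implements DurableObject {
	constructor(private state: DurableObjectState, _env: unknown) {}

	async fetch(_request: Request): Promise<Response> {
		// state.storage is proxied by instrumentState/instrumentStorage, so both calls below get spans
		const count = ((await this.state.storage.get<number>('count')) ?? 0) + 1
		await this.state.storage.put('count', count)
		return new Response(String(count))
	}
}

// The wrapped class is what the Worker module would export for the binding.
export const TracedCounter = instrumentDO(Counter, {
	exporter: { url: 'https://example.com/v1/traces' }, // placeholder endpoint
	service: { name: 'counter-do' }, // placeholder service name
})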
| src/instrumentation/do.ts | evanderkoogh-otel-cf-workers-18fd741 | [
{
"filename": "src/instrumentation/do-storage.ts",
"retrieved_chunk": "\treturn wrap(fn, fnHandler)\n}\nexport function instrumentStorage(storage: DurableObjectStorage): DurableObjectStorage {\n\tconst storageHandler: ProxyHandler<DurableObjectStorage> = {\n\t\tget: (target, prop, receiver) => {\n\t\t\tconst operation = String(prop)\n\t\t\tconst fn = Reflect.get(target, prop, receiver)\n\t\t\treturn instrumentStorageFn(fn, operation)\n\t\t},\n\t}",
"score": 43.49650483211849
},
{
"filename": "src/instrumentation/kv.ts",
"retrieved_chunk": "\t\tget: (target, prop, receiver) => {\n\t\t\tconst operation = String(prop)\n\t\t\tconst fn = Reflect.get(target, prop, receiver)\n\t\t\treturn instrumentKVFn(fn, name, operation)\n\t\t},\n\t}\n\treturn wrap(kv, kvHandler)\n}",
"score": 39.63746760782965
},
{
"filename": "src/wrap.ts",
"retrieved_chunk": "\tproxyHandler.get = (target, prop, receiver) => {\n\t\tif (prop === unwrapSymbol) {\n\t\t\treturn item\n\t\t} else {\n\t\t\tif (handler.get) {\n\t\t\t\treturn handler.get(target, prop, receiver)\n\t\t\t} else if (autoPassthrough) {\n\t\t\t\treturn passthroughGet(target, prop)\n\t\t\t}\n\t\t}",
"score": 35.2284481738045
},
{
"filename": "src/instrumentation/env.ts",
"retrieved_chunk": "const isDurableObject = (item?: unknown): item is DurableObjectNamespace => {\n\treturn !!(item as DurableObjectNamespace)?.idFromName\n}\nconst instrumentEnv = (env: Record<string, unknown>): Record<string, unknown> => {\n\tconst envHandler: ProxyHandler<Record<string, unknown>> = {\n\t\tget: (target, prop, receiver) => {\n\t\t\tconst item = Reflect.get(target, prop, receiver)\n\t\t\tif (isKVNamespace(item)) {\n\t\t\t\treturn instrumentKV(item, String(prop))\n\t\t\t} else if (isQueue(item)) {",
"score": 33.57697507677733
},
{
"filename": "src/instrumentation/queue.ts",
"retrieved_chunk": "\t\t\t\tconst messagesHandler: ProxyHandler<MessageBatch['messages']> = {\n\t\t\t\t\tget: (target, prop) => {\n\t\t\t\t\t\tif (typeof prop === 'string' && !isNaN(parseInt(prop))) {\n\t\t\t\t\t\t\tconst message = Reflect.get(target, prop)\n\t\t\t\t\t\t\treturn proxyQueueMessage(message, count)\n\t\t\t\t\t\t} else {\n\t\t\t\t\t\t\treturn Reflect.get(target, prop)\n\t\t\t\t\t\t}\n\t\t\t\t\t},\n\t\t\t\t}",
"score": 31.014247978791744
}
] | typescript | result = Reflect.get(target, prop, unwrap(receiver))
if (prop === 'storage') { |
import { propagation } from '@opentelemetry/api'
import { W3CTraceContextPropagator } from '@opentelemetry/core'
import { Resource } from '@opentelemetry/resources'
import { SemanticResourceAttributes } from '@opentelemetry/semantic-conventions'
import {
AlwaysOnSampler,
ParentBasedSampler,
ReadableSpan,
Sampler,
SpanExporter,
TraceIdRatioBasedSampler,
} from '@opentelemetry/sdk-trace-base'
import { Initialiser } from './config.js'
import { OTLPExporter } from './exporter.js'
import { WorkerTracerProvider } from './provider.js'
import { isHeadSampled, isRootErrorSpan, multiTailSampler } from './sampling.js'
import { BatchTraceSpanProcessor } from './spanprocessor.js'
import { Trigger, TraceConfig, ResolvedTraceConfig, ExporterConfig, ParentRatioSamplingConfig } from './types.js'
import { unwrap } from './wrap.js'
import { createFetchHandler, instrumentGlobalFetch } from './instrumentation/fetch.js'
import { instrumentGlobalCache } from './instrumentation/cache.js'
import { createQueueHandler } from './instrumentation/queue.js'
import { DOClass, instrumentDOClass } from './instrumentation/do.js'
type FetchHandler = ExportedHandlerFetchHandler<unknown, unknown>
type QueueHandler = ExportedHandlerQueueHandler
export type ResolveConfigFn = (env: any, trigger: Trigger) => TraceConfig
export type ConfigurationOption = TraceConfig | ResolveConfigFn
export function isRequest(trigger: Trigger): trigger is Request {
return trigger instanceof Request
}
export function isMessageBatch(trigger: Trigger): trigger is MessageBatch {
return !!(trigger as MessageBatch).ackAll
}
export function isAlarm(trigger: Trigger): trigger is 'do-alarm' {
return trigger === 'do-alarm'
}
const createResource = (config: ResolvedTraceConfig): Resource => {
const workerResourceAttrs = {
[SemanticResourceAttributes.CLOUD_PROVIDER]: 'cloudflare',
[SemanticResourceAttributes.CLOUD_PLATFORM]: 'cloudflare.workers',
[SemanticResourceAttributes.CLOUD_REGION]: 'earth',
// [SemanticResourceAttributes.FAAS_NAME]: '//TODO',
// [SemanticResourceAttributes.FAAS_VERSION]: '//TODO',
[SemanticResourceAttributes.FAAS_MAX_MEMORY]: 128,
[SemanticResourceAttributes.TELEMETRY_SDK_LANGUAGE]: 'JavaScript',
[SemanticResourceAttributes.TELEMETRY_SDK_NAME]: '@microlabs/otel-workers-sdk',
}
const serviceResource = new Resource({
[SemanticResourceAttributes.SERVICE_NAME]: config.service.name,
[SemanticResourceAttributes.SERVICE_NAMESPACE]: config.service.namespace,
[SemanticResourceAttributes.SERVICE_VERSION]: config.service.version,
})
const resource = new Resource(workerResourceAttrs)
return resource.merge(serviceResource)
}
function isSpanExporter(exporterConfig: ExporterConfig): exporterConfig is SpanExporter {
return !!(exporterConfig as SpanExporter).export
}
let initialised = false
function init(config: ResolvedTraceConfig): void {
if (!initialised) {
instrumentGlobalCache()
instrumentGlobalFetch()
propagation.setGlobalPropagator(new W3CTraceContextPropagator())
const resource = createResource(config)
const spanProcessor = new BatchTraceSpanProcessor()
const provider = new WorkerTracerProvider(spanProcessor, resource)
provider.register()
initialised = true
}
}
function isSampler(sampler: Sampler | ParentRatioSamplingConfig): sampler is Sampler {
return !!(sampler as Sampler).shouldSample
}
function createSampler(conf: ParentRatioSamplingConfig): Sampler {
const ratioSampler = new TraceIdRatioBasedSampler(conf.ratio)
if (typeof conf.acceptRemote === 'boolean' && !conf.acceptRemote) {
return new ParentBasedSampler({
root: ratioSampler,
remoteParentSampled: ratioSampler,
remoteParentNotSampled: ratioSampler,
})
} else {
return new ParentBasedSampler({ root: ratioSampler })
}
}
function parseConfig(supplied: TraceConfig): ResolvedTraceConfig {
const headSampleConf = supplied.sampling?.headSampler
const headSampler = headSampleConf
? isSampler(headSampleConf)
? headSampleConf
: createSampler(headSampleConf)
: new AlwaysOnSampler()
return {
exporter: isSpanExporter(supplied.exporter | ) ? supplied.exporter : new OTLPExporter(supplied.exporter),
fetch: { |
includeTraceContext: supplied.fetch?.includeTraceContext ?? true,
},
handlers: {
fetch: {
acceptTraceContext: supplied.handlers?.fetch?.acceptTraceContext ?? true,
},
},
postProcessor: supplied.postProcessor || ((spans: ReadableSpan[]) => spans),
sampling: {
headSampler,
tailSampler: supplied.sampling?.tailSampler || multiTailSampler([isHeadSampled, isRootErrorSpan]),
},
service: supplied.service,
}
}
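// Illustration (assumed values, not from the source): given a minimal user config such as
//   { exporter: { url: 'https://example.com/v1/traces' }, service: { name: 'my-worker' } },
// parseConfig fills in the remaining defaults: includeTraceContext and acceptTraceContext are
// both true, the postProcessor is the identity function, the head sampler is an AlwaysOnSampler,
// and the tail sampler is multiTailSampler([isHeadSampled, isRootErrorSpan]).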
function createInitialiser(config: ConfigurationOption): Initialiser {
if (typeof config === 'function') {
return (env, trigger) => {
const conf = parseConfig(config(env, trigger))
init(conf)
return conf
}
} else {
return () => {
const conf = parseConfig(config)
init(conf)
return conf
}
}
}
export function instrument<E, Q, C>(
handler: ExportedHandler<E, Q, C>,
config: ConfigurationOption
): ExportedHandler<E, Q, C> {
const initialiser = createInitialiser(config)
if (handler.fetch) {
const fetcher = unwrap(handler.fetch) as FetchHandler
handler.fetch = createFetchHandler(fetcher, initialiser)
}
if (handler.queue) {
const queuer = unwrap(handler.queue) as QueueHandler
handler.queue = createQueueHandler(queuer, initialiser)
}
return handler
}
export function instrumentDO(doClass: DOClass, config: ConfigurationOption) {
const initialiser = createInitialiser(config)
return instrumentDOClass(doClass, initialiser)
}
export { waitUntilTrace } from './instrumentation/fetch.js'
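A minimal wiring sketch for a Worker entry point. It assumes the OTLP exporter config accepts a url and optional headers; the endpoint, header, binding name and service name are placeholders rather than values from the source.

const handler: ExportedHandler<Record<string, unknown>> = {
	async fetch(_request, _env, _ctx) {
		return new Response('ok')
	},
}

const config: ResolveConfigFn = (env, _trigger) => ({
	exporter: {
		url: 'https://example.com/v1/traces', // placeholder endpoint
		headers: { 'x-api-key': String(env.API_KEY) }, // placeholder auth header read from env
	},
	service: { name: 'example-worker' }, // placeholder service name
	sampling: { headSampler: { ratio: 0.25, acceptRemote: false } }, // optional: ratio-based head sampling
})

// instrument() wraps the fetch/queue handlers so each invocation initialises the tracer
// provider and exports the finished spans.
export default instrument(handler, config)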
| src/sdk.ts | evanderkoogh-otel-cf-workers-18fd741 | [
{
"filename": "src/types.ts",
"retrieved_chunk": "\theadSampler?: HS\n\ttailSampler?: TailSampleFn\n}\nexport interface TraceConfig<EC extends ExporterConfig = ExporterConfig> {\n\texporter: EC\n\thandlers?: HandlerConfig\n\tfetch?: FetcherConfig\n\tpostProcessor?: PostProcessorFn\n\tsampling?: SamplingConfig\n\tservice: ServiceConfig",
"score": 31.028165115721926
},
{
"filename": "src/spanprocessor.ts",
"retrieved_chunk": "\t\t\tthis.traces.set(traceId, newState)\n\t\t}\n\t\treturn newState\n\t}\n\tprivate export(traceId: string) {\n\t\tconst { exporter, sampling, postProcessor } = getActiveConfig()\n\t\tconst exportArgs = { exporter, tailSampler: sampling.tailSampler, postProcessor }\n\t\tconst newState = this.action(traceId, { actionName: 'startExport', args: exportArgs })\n\t\tif (newState.stateName === 'exporting') {\n\t\t\tconst promise = newState.promise",
"score": 25.349888901332505
},
{
"filename": "src/spanprocessor.ts",
"retrieved_chunk": "}\nfunction startExport(currentState: TraceCompleteState, { args }: StartExportAction): ExportingState | DoneState {\n\tconst { exporter, tailSampler, postProcessor } = args\n\tconst { traceId, localRootSpan, completedSpans: spans } = currentState\n\tconst shouldExport = tailSampler({ traceId, localRootSpan, spans })\n\tif (shouldExport) {\n\t\tconst exportSpans = postProcessor(spans)\n\t\tconst promise = new Promise<ExportResult>((resolve) => {\n\t\t\texporter.export(exportSpans, resolve)\n\t\t})",
"score": 23.723642506648133
},
{
"filename": "src/types.ts",
"retrieved_chunk": "}\nexport interface ResolvedTraceConfig extends TraceConfig {\n\texporter: SpanExporter\n\thandlers: Required<HandlerConfig>\n\tfetch: Required<FetcherConfig>\n\tpostProcessor: PostProcessorFn\n\tsampling: Required<SamplingConfig<Sampler>>\n}\nexport interface DOConstructorTrigger {\n\tid: string",
"score": 20.61209835120424
},
{
"filename": "src/types.ts",
"retrieved_chunk": "import { ReadableSpan, Sampler, SpanExporter } from '@opentelemetry/sdk-trace-base'\nimport { OTLPExporterConfig } from './exporter.js'\nimport { FetchHandlerConfig, FetcherConfig } from './instrumentation/fetch.js'\nimport { TailSampleFn } from './sampling.js'\nexport type PostProcessorFn = (spans: ReadableSpan[]) => ReadableSpan[]\nexport type ExporterConfig = OTLPExporterConfig | SpanExporter\nexport interface HandlerConfig {\n\tfetch?: FetchHandlerConfig\n}\nexport interface ServiceConfig {",
"score": 16.674900581706613
}
] | typescript | ) ? supplied.exporter : new OTLPExporter(supplied.exporter),
fetch: { |
import { context as api_context, trace, SpanOptions, SpanKind, Exception, SpanStatusCode } from '@opentelemetry/api'
import { SemanticAttributes } from '@opentelemetry/semantic-conventions'
import { passthroughGet, unwrap, wrap } from '../wrap.js'
import {
getParentContextFromHeaders,
gatherIncomingCfAttributes,
gatherRequestAttributes,
gatherResponseAttributes,
instrumentFetcher,
} from './fetch.js'
import { instrumentEnv } from './env.js'
import { Initialiser, setConfig } from '../config.js'
import { exportSpans } from './common.js'
import { instrumentStorage } from './do-storage.js'
import { DOConstructorTrigger } from '../types.js'
type FetchFn = DurableObject['fetch']
type AlarmFn = DurableObject['alarm']
type Env = Record<string, unknown>
function instrumentBindingStub(stub: DurableObjectStub, nsName: string): DurableObjectStub {
const stubHandler: ProxyHandler<typeof stub> = {
get(target, prop) {
if (prop === 'fetch') {
const fetcher = Reflect.get(target, prop)
const attrs = {
name: `durable_object:${nsName}`,
'do.namespace': nsName,
'do.id': target.id.toString(),
'do.id.name': target.id.name,
}
return instrumentFetcher(fetcher, () => ({ includeTraceContext: true }), attrs)
} else {
return passthroughGet(target, prop)
}
},
}
return wrap(stub, stubHandler)
}
function instrumentBindingGet(getFn: DurableObjectNamespace['get'], nsName: string): DurableObjectNamespace['get'] {
const getHandler: ProxyHandler<DurableObjectNamespace['get']> = {
apply(target, thisArg, argArray) {
const stub: DurableObjectStub = Reflect.apply(target, thisArg, argArray)
return instrumentBindingStub(stub, nsName)
},
}
return wrap(getFn, getHandler)
}
export function instrumentDOBinding(ns: DurableObjectNamespace, nsName: string) {
const nsHandler: ProxyHandler<typeof ns> = {
get(target, prop) {
if (prop === 'get') {
const fn = Reflect.get(ns, prop)
return instrumentBindingGet(fn, nsName)
} else {
return passthroughGet(target, prop)
}
},
}
return wrap(ns, nsHandler)
}
export function instrumentState(state: DurableObjectState) {
const stateHandler: ProxyHandler<DurableObjectState> = {
get(target, prop, receiver) {
const result = Reflect.get(target, prop, unwrap(receiver))
if (prop === 'storage') {
return instrumentStorage(result)
} else if (typeof result === 'function') {
return result.bind(target)
} else {
return result
}
},
}
return wrap(state, stateHandler)
}
let cold_start = true
export type DOClass = { new (state: DurableObjectState, env: any): DurableObject }
export function executeDOFetch(fetchFn: FetchFn, request: Request, id: DurableObjectId): Promise<Response> {
const spanContext = getParentContextFromHeaders(request.headers)
const tracer = trace.getTracer('DO fetchHandler')
const attributes = {
[SemanticAttributes.FAAS_TRIGGER]: 'http',
[SemanticAttributes.FAAS_COLDSTART]: cold_start,
}
cold_start = false
Object.assign(attributes, gatherRequestAttributes(request))
Object.assign(attributes, gatherIncomingCfAttributes(request))
const options: SpanOptions = {
attributes,
kind: SpanKind.SERVER,
}
const name = id.name || ''
const promise = tracer.startActiveSpan(`do.fetchHandler:${name}`, options, spanContext, async (span) => {
try {
const response: Response = await fetchFn(request)
if (response.ok) {
span.setStatus({ code: SpanStatusCode.OK })
}
span.setAttributes(gatherResponseAttributes(response))
span.end()
return response
} catch (error) {
span.recordException(error as Exception)
span.setStatus({ code: SpanStatusCode.ERROR })
span.end()
throw error
}
})
return promise
}
export function executeDOAlarm(alarmFn: NonNullable<AlarmFn>, id: DurableObjectId): Promise<void> {
const tracer = trace.getTracer('DO alarmHandler')
const name = id.name || ''
const promise = tracer.startActiveSpan(`do.alarmHandler:${name}`, async (span) => {
span.setAttribute(SemanticAttributes.FAAS_COLDSTART, cold_start)
cold_start = false
span.setAttribute('do.id', id.toString())
if (id.name) span.setAttribute('do.name', id.name)
try {
await alarmFn()
span.end()
} catch (error) {
span.recordException(error as Exception)
span.setStatus({ code: SpanStatusCode.ERROR })
span.end()
throw error
}
})
return promise
}
function instrumentFetchFn(fetchFn: FetchFn, initialiser: Initialiser, env: Env, id: DurableObjectId): FetchFn {
const fetchHandler: ProxyHandler<FetchFn> = {
async apply(target, thisArg, argArray: Parameters<FetchFn>) {
const request = argArray[0]
const config = initialiser(env, request)
const | context = setConfig(config)
try { |
const bound = target.bind(unwrap(thisArg))
return await api_context.with(context, executeDOFetch, undefined, bound, request, id)
} catch (error) {
throw error
} finally {
exportSpans()
}
},
}
return wrap(fetchFn, fetchHandler)
}
function instrumentAlarmFn(alarmFn: AlarmFn, initialiser: Initialiser, env: Env, id: DurableObjectId) {
if (!alarmFn) return undefined
const alarmHandler: ProxyHandler<NonNullable<AlarmFn>> = {
async apply(target, thisArg) {
const config = initialiser(env, 'do-alarm')
const context = setConfig(config)
try {
const bound = target.bind(unwrap(thisArg))
return await api_context.with(context, executeDOAlarm, undefined, bound, id)
} catch (error) {
throw error
} finally {
exportSpans()
}
},
}
return wrap(alarmFn, alarmHandler)
}
function instrumentDurableObject(doObj: DurableObject, initialiser: Initialiser, env: Env, state: DurableObjectState) {
const objHandler: ProxyHandler<DurableObject> = {
get(target, prop) {
if (prop === 'fetch') {
const fetchFn = Reflect.get(target, prop)
return instrumentFetchFn(fetchFn, initialiser, env, state.id)
} else if (prop === 'alarm') {
const alarmFn = Reflect.get(target, prop)
return instrumentAlarmFn(alarmFn, initialiser, env, state.id)
} else {
const result = Reflect.get(target, prop)
				if (typeof result === 'function') {
					// return the bound method so callers get `this` pointing at the real DO instance
					return result.bind(doObj)
				}
return result
}
},
}
return wrap(doObj, objHandler)
}
export function instrumentDOClass(doClass: DOClass, initialiser: Initialiser): DOClass {
const classHandler: ProxyHandler<DOClass> = {
construct(target, [orig_state, orig_env]: ConstructorParameters<DOClass>) {
const trigger: DOConstructorTrigger = {
id: orig_state.id.toString(),
name: orig_state.id.name,
}
const constructorConfig = initialiser(orig_env, trigger)
const context = setConfig(constructorConfig)
const state = instrumentState(orig_state)
const env = instrumentEnv(orig_env)
const createDO = () => {
return new target(state, env)
}
const doObj = api_context.with(context, createDO)
return instrumentDurableObject(doObj, initialiser, env, state)
},
}
return wrap(doClass, classHandler)
}
| src/instrumentation/do.ts | evanderkoogh-otel-cf-workers-18fd741 | [
{
"filename": "src/instrumentation/fetch.ts",
"retrieved_chunk": "\t\tapply: async (target, _thisArg, argArray: Parameters<FetchHandler>): Promise<Response> => {\n\t\t\tconst [request, orig_env, orig_ctx] = argArray\n\t\t\tconst config = initialiser(orig_env as Record<string, unknown>, request)\n\t\t\tconst env = instrumentEnv(orig_env as Record<string, unknown>)\n\t\t\tconst { ctx, tracker } = proxyExecutionContext(orig_ctx)\n\t\t\tconst context = setConfig(config)\n\t\t\ttry {\n\t\t\t\tconst args: FetchHandlerArgs = [request, env, ctx]\n\t\t\t\treturn await api_context.with(context, executeFetchHandler, undefined, target, args)\n\t\t\t} catch (error) {",
"score": 48.69795057246753
},
{
"filename": "src/instrumentation/queue.ts",
"retrieved_chunk": "\t\t\tconst [batch, orig_env, orig_ctx] = argArray\n\t\t\tconst config = initialiser(orig_env as Record<string, unknown>, batch)\n\t\t\tconst env = instrumentEnv(orig_env as Record<string, unknown>)\n\t\t\tconst { ctx, tracker } = proxyExecutionContext(orig_ctx)\n\t\t\tconst context = setConfig(config)\n\t\t\ttry {\n\t\t\t\tconst args: QueueHandlerArgs = [batch, env, ctx]\n\t\t\t\treturn await api_context.with(context, executeQueueHandler, undefined, target, args)\n\t\t\t} catch (error) {\n\t\t\t\tthrow error",
"score": 36.399776329547095
},
{
"filename": "src/instrumentation/queue.ts",
"retrieved_chunk": "\t\t\tcount.retryRemaining()\n\t\t\tspan.end()\n\t\t\tthrow error\n\t\t}\n\t})\n\treturn promise\n}\nexport function createQueueHandler(queueFn: QueueHandler, initialiser: Initialiser) {\n\tconst queueHandler: ProxyHandler<QueueHandler> = {\n\t\tasync apply(target, _thisArg, argArray: Parameters<QueueHandler>): Promise<void> {",
"score": 35.2783781660228
},
{
"filename": "src/instrumentation/fetch.ts",
"retrieved_chunk": "\tfetchFn: Fetcher['fetch'],\n\tconfigFn: getFetchConfig,\n\tattrs?: Attributes\n): Fetcher['fetch'] {\n\tconst handler: ProxyHandler<typeof fetch> = {\n\t\tapply: (target, thisArg, argArray): ReturnType<typeof fetch> => {\n\t\t\tconst workerConfig = getActiveConfig()\n\t\t\tconst config = configFn(workerConfig)\n\t\t\tconst request = new Request(argArray[0], argArray[1])\n\t\t\tconst tracer = trace.getTracer('fetcher')",
"score": 32.79997706388592
},
{
"filename": "src/instrumentation/fetch.ts",
"retrieved_chunk": "\tconst promise = tracer.startActiveSpan('fetchHandler', options, spanContext, async (span) => {\n\t\ttry {\n\t\t\tconst response: Response = await fetchFn(request, env, ctx)\n\t\t\tif (response.status < 500) {\n\t\t\t\tspan.setStatus({ code: SpanStatusCode.OK })\n\t\t\t}\n\t\t\tspan.setAttributes(gatherResponseAttributes(response))\n\t\t\tspan.end()\n\t\t\treturn response\n\t\t} catch (error) {",
"score": 30.56847141397522
}
] | typescript | context = setConfig(config)
try { |
import { trace, SpanOptions, SpanKind, Attributes, Exception, context as api_context } from '@opentelemetry/api'
import { Initialiser, setConfig } from '../config.js'
import { exportSpans, proxyExecutionContext } from './common.js'
import { instrumentEnv } from './env.js'
import { unwrap, wrap } from '../wrap.js'
type QueueHandler = ExportedHandlerQueueHandler<unknown, unknown>
export type QueueHandlerArgs = Parameters<QueueHandler>
const traceIdSymbol = Symbol('traceId')
class MessageStatusCount {
succeeded = 0
failed = 0
readonly total: number
constructor(total: number) {
this.total = total
}
ack() {
this.succeeded = this.succeeded + 1
}
ackRemaining() {
this.succeeded = this.total - this.failed
}
retry() {
this.failed = this.failed + 1
}
retryRemaining() {
this.failed = this.total - this.succeeded
}
toAttributes(): Attributes {
return {
'queue.messages_count': this.total,
'queue.messages_success': this.succeeded,
'queue.messages_failed': this.failed,
'queue.batch_success': this.succeeded === this.total,
}
}
}
const addEvent = (name: string, msg?: Message) => {
const attrs: Attributes = {}
if (msg) {
attrs['queue.message_id'] = msg.id
attrs['queue.message_timestamp'] = msg.timestamp.toISOString()
}
trace.getActiveSpan()?.addEvent(name, attrs)
}
const proxyQueueMessage = <Q>(msg: Message<Q>, count: MessageStatusCount): Message<Q> => {
const msgHandler: ProxyHandler<Message<Q>> = {
get: (target, prop) => {
if (prop === 'ack') {
const ackFn = Reflect.get(target, prop)
return new Proxy(ackFn, {
apply: (fnTarget) => {
addEvent('messageAck', msg)
count.ack()
//TODO: handle errors
Reflect.apply(fnTarget, msg, [])
},
})
} else if (prop === 'retry') {
const retryFn = Reflect.get(target, prop)
return new Proxy(retryFn, {
apply: (fnTarget) => {
addEvent('messageRetry', msg)
count.retry()
//TODO: handle errors
const result = Reflect.apply(fnTarget, msg, [])
return result
},
})
} else {
return Reflect.get(target, prop, msg)
}
},
}
return wrap(msg, msgHandler)
}
const proxyMessageBatch = <E, Q>(batch: MessageBatch, count: MessageStatusCount) => {
const batchHandler: ProxyHandler<MessageBatch> = {
get: (target, prop) => {
if (prop === 'messages') {
const messages = Reflect.get(target, prop)
const messagesHandler: ProxyHandler<MessageBatch['messages']> = {
get: (target, prop) => {
if (typeof prop === 'string' && !isNaN(parseInt(prop))) {
const message = Reflect.get(target, prop)
return proxyQueueMessage(message, count)
} else {
return Reflect.get(target, prop)
}
},
}
return wrap(messages, messagesHandler)
} else if (prop === 'ackAll') {
const ackFn = Reflect.get(target, prop)
return new Proxy(ackFn, {
apply: (fnTarget) => {
addEvent('ackAll')
count.ackRemaining()
//TODO: handle errors
Reflect.apply(fnTarget, batch, [])
},
})
} else if (prop === 'retryAll') {
const retryFn = Reflect.get(target, prop)
return new Proxy(retryFn, {
apply: (fnTarget) => {
addEvent('retryAll')
count.retryRemaining()
//TODO: handle errors
Reflect.apply(fnTarget, batch, [])
},
})
}
return Reflect.get(target, prop)
},
}
return wrap(batch, batchHandler)
}
export function executeQueueHandler(queueFn: QueueHandler, [batch, env, ctx]: QueueHandlerArgs): Promise<void> {
const count = new MessageStatusCount(batch.messages.length)
batch = proxyMessageBatch(batch, count)
const tracer = trace.getTracer('queueHandler')
const options: SpanOptions = {
attributes: {
'queue.name': batch.queue,
},
kind: SpanKind.CONSUMER,
}
const promise = tracer.startActiveSpan(`queueHandler:${batch.queue}`, options, async (span) => {
const traceId = span.spanContext().traceId
api_context.active().setValue(traceIdSymbol, traceId)
try {
			const result = await queueFn(batch, env, ctx)
			span.setAttribute('queue.implicitly_acked', count.total - count.succeeded - count.failed)
count.ackRemaining()
span.setAttributes(count.toAttributes())
span.end()
return result
} catch (error) {
span.recordException(error as Exception)
span.setAttribute('queue.implicitly_retried', count.total - count.succeeded - count.failed)
count.retryRemaining()
span.end()
throw error
}
})
return promise
}
export function createQueueHandler(queueFn: QueueHandler, initialiser: Initialiser) {
const queueHandler: ProxyHandler<QueueHandler> = {
async apply(target, _thisArg, argArray: Parameters<QueueHandler>): Promise<void> {
const [batch, orig_env, orig_ctx] = argArray
const config = initialiser(orig_env as Record<string, unknown>, batch)
const env = instrumentEnv(orig_env as Record<string, unknown>)
const { ctx, tracker } = proxyExecutionContext(orig_ctx)
const context = setConfig(config)
try {
const args: QueueHandlerArgs = [batch, env, ctx]
return await api_context.with(context, executeQueueHandler, undefined, target, args)
} catch (error) {
throw error
} finally {
orig_ctx.waitUntil(exportSpans(tracker))
}
},
}
return wrap(queueFn, queueHandler)
}
function instrumentQueueSend(fn: Queue<unknown>['send'], name: string): Queue<unknown>['send'] {
const tracer = trace.getTracer('queueSender')
const handler: ProxyHandler<Queue<unknown>['send']> = {
apply: (target, thisArg, argArray) => {
return tracer.startActiveSpan(`queueSend: ${name}`, async (span) => {
span.setAttribute('queue.operation', 'send')
await Reflect.apply(target | , unwrap(thisArg), argArray)
span.end()
})
},
} |
return wrap(fn, handler)
}
function instrumentQueueSendBatch(fn: Queue<unknown>['sendBatch'], name: string): Queue<unknown>['sendBatch'] {
const tracer = trace.getTracer('queueSender')
const handler: ProxyHandler<Queue<unknown>['sendBatch']> = {
apply: (target, thisArg, argArray) => {
return tracer.startActiveSpan(`queueSendBatch: ${name}`, async (span) => {
span.setAttribute('queue.operation', 'sendBatch')
await Reflect.apply(target, unwrap(thisArg), argArray)
span.end()
})
},
}
return wrap(fn, handler)
}
export function instrumentQueueSender(queue: Queue<unknown>, name: string) {
const queueHandler: ProxyHandler<Queue<unknown>> = {
get: (target, prop) => {
if (prop === 'send') {
const sendFn = Reflect.get(target, prop)
return instrumentQueueSend(sendFn, name)
} else if (prop === 'sendBatch') {
const sendFn = Reflect.get(target, prop)
return instrumentQueueSendBatch(sendFn, name)
} else {
return Reflect.get(target, prop)
}
},
}
return wrap(queue, queueHandler)
}
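A brief consumer-side sketch; the handler body and binding names are assumptions, not from the source. When a handler like this is wrapped by createQueueHandler, the batch and every message are proxied, so ack()/retry() emit 'messageAck'/'messageRetry' span events and feed the queue.messages_* attributes, while unhandled messages are implicitly acked by executeQueueHandler.

const exampleQueueConsumer: ExportedHandlerQueueHandler = async (batch, _env, _ctx) => {
	for (const message of batch.messages) {
		try {
			// ... process message.body ...
			message.ack()
		} catch {
			message.retry()
		}
	}
}

// On the producer side, instrumentQueueSender wraps a Queue binding so that a call such as
// env.MY_QUEUE.send(payload) is recorded as a `queueSend: MY_QUEUE` span.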
| src/instrumentation/queue.ts | evanderkoogh-otel-cf-workers-18fd741 | [
{
"filename": "src/instrumentation/do-storage.ts",
"retrieved_chunk": "\t\t\treturn tracer.startActiveSpan(`do:storage:${operation}`, options, async (span) => {\n\t\t\t\tconst result = await Reflect.apply(target, thisArg, argArray)\n\t\t\t\tconst extraAttrs = StorageAttributes[operation] ? StorageAttributes[operation](argArray, result) : {}\n\t\t\t\tspan.setAttributes(extraAttrs)\n\t\t\t\tspan.setAttribute('hasResult', !!result)\n\t\t\t\tspan.end()\n\t\t\t\treturn result\n\t\t\t})\n\t\t},\n\t}",
"score": 50.72807146041024
},
{
"filename": "src/instrumentation/kv.ts",
"retrieved_chunk": "\t\t\t\tattributes: {\n\t\t\t\t\tbinding_type: 'KV',\n\t\t\t\t\tkv_namespace: name,\n\t\t\t\t\toperation,\n\t\t\t\t},\n\t\t\t}\n\t\t\treturn tracer.startActiveSpan(`kv:${name}:${operation}`, options, async (span) => {\n\t\t\t\tconst result = await Reflect.apply(target, thisArg, argArray)\n\t\t\t\tconst extraAttrs = KVAttributes[operation] ? KVAttributes[operation](argArray, result) : {}\n\t\t\t\tspan.setAttributes(extraAttrs)",
"score": 46.342794760857345
},
{
"filename": "src/instrumentation/cache.ts",
"retrieved_chunk": "\t\t\t\t'http.url': argArray[0].url ? sanitiseURL(argArray[0].url) : undefined,\n\t\t\t\t'cache.operation': op,\n\t\t\t}\n\t\t\tconst options: SpanOptions = { kind: SpanKind.CLIENT, attributes }\n\t\t\treturn tracer.startActiveSpan(`cache:${cacheName}:${op}`, options, async (span) => {\n\t\t\t\tconst result = await Reflect.apply(target, thisArg, argArray)\n\t\t\t\tif (op === 'match') {\n\t\t\t\t\tspan.setAttribute('cache.hit', !result)\n\t\t\t\t}\n\t\t\t\tspan.end()",
"score": 43.06154079240573
},
{
"filename": "src/instrumentation/cache.ts",
"retrieved_chunk": "\tconst handler: ProxyHandler<typeof openFn> = {\n\t\tasync apply(target, thisArg, argArray) {\n\t\t\tconst cacheName = argArray[0]\n\t\t\tconst cache = await Reflect.apply(target, thisArg, argArray)\n\t\t\treturn instrumentCache(cache, cacheName)\n\t\t},\n\t}\n\treturn wrap(openFn, handler)\n}\nfunction _instrumentGlobalCache() {",
"score": 38.058981530011266
},
{
"filename": "src/wrap.ts",
"retrieved_chunk": "\t}\n\tproxyHandler.apply = (target, thisArg, argArray) => {\n\t\tif (handler.apply) {\n\t\t\treturn handler.apply(unwrap(target), unwrap(thisArg), argArray)\n\t\t}\n\t}\n\treturn new Proxy(item, proxyHandler)\n}\nexport function unwrap<T extends object>(item: T): T {\n\tif (item && isWrapped(item)) {",
"score": 31.637840146061013
}
] | typescript | , unwrap(thisArg), argArray)
span.end()
})
},
} |
import { propagation } from '@opentelemetry/api'
import { W3CTraceContextPropagator } from '@opentelemetry/core'
import { Resource } from '@opentelemetry/resources'
import { SemanticResourceAttributes } from '@opentelemetry/semantic-conventions'
import {
AlwaysOnSampler,
ParentBasedSampler,
ReadableSpan,
Sampler,
SpanExporter,
TraceIdRatioBasedSampler,
} from '@opentelemetry/sdk-trace-base'
import { Initialiser } from './config.js'
import { OTLPExporter } from './exporter.js'
import { WorkerTracerProvider } from './provider.js'
import { isHeadSampled, isRootErrorSpan, multiTailSampler } from './sampling.js'
import { BatchTraceSpanProcessor } from './spanprocessor.js'
import { Trigger, TraceConfig, ResolvedTraceConfig, ExporterConfig, ParentRatioSamplingConfig } from './types.js'
import { unwrap } from './wrap.js'
import { createFetchHandler, instrumentGlobalFetch } from './instrumentation/fetch.js'
import { instrumentGlobalCache } from './instrumentation/cache.js'
import { createQueueHandler } from './instrumentation/queue.js'
import { DOClass, instrumentDOClass } from './instrumentation/do.js'
type FetchHandler = ExportedHandlerFetchHandler<unknown, unknown>
type QueueHandler = ExportedHandlerQueueHandler
export type ResolveConfigFn = (env: any, trigger: Trigger) => TraceConfig
export type ConfigurationOption = TraceConfig | ResolveConfigFn
export function isRequest(trigger: Trigger): trigger is Request {
return trigger instanceof Request
}
export function isMessageBatch(trigger: Trigger): trigger is MessageBatch {
return !!(trigger as MessageBatch).ackAll
}
export function isAlarm(trigger: Trigger): trigger is 'do-alarm' {
return trigger === 'do-alarm'
}
const createResource = (config: ResolvedTraceConfig): Resource => {
const workerResourceAttrs = {
[SemanticResourceAttributes.CLOUD_PROVIDER]: 'cloudflare',
[SemanticResourceAttributes.CLOUD_PLATFORM]: 'cloudflare.workers',
[SemanticResourceAttributes.CLOUD_REGION]: 'earth',
// [SemanticResourceAttributes.FAAS_NAME]: '//TODO',
// [SemanticResourceAttributes.FAAS_VERSION]: '//TODO',
[SemanticResourceAttributes.FAAS_MAX_MEMORY]: 128,
[SemanticResourceAttributes.TELEMETRY_SDK_LANGUAGE]: 'JavaScript',
[SemanticResourceAttributes.TELEMETRY_SDK_NAME]: '@microlabs/otel-workers-sdk',
}
const serviceResource = new Resource({
[SemanticResourceAttributes.SERVICE_NAME]: config.service.name,
[SemanticResourceAttributes.SERVICE_NAMESPACE]: config.service.namespace,
[SemanticResourceAttributes.SERVICE_VERSION]: config.service.version,
})
const resource = new Resource(workerResourceAttrs)
return resource.merge(serviceResource)
}
function isSpanExporter(exporterConfig: ExporterConfig): exporterConfig is SpanExporter {
return !!(exporterConfig as SpanExporter).export
}
let initialised = false
function init(config: ResolvedTraceConfig): void {
if (!initialised) {
instrumentGlobalCache()
instrumentGlobalFetch()
propagation.setGlobalPropagator(new W3CTraceContextPropagator())
const resource = createResource(config)
const spanProcessor = new BatchTraceSpanProcessor()
const provider = new WorkerTracerProvider(spanProcessor, resource)
provider.register()
initialised = true
}
}
function isSampler(sampler: Sampler | ParentRatioSamplingConfig): sampler is Sampler {
return !!(sampler as Sampler).shouldSample
}
function createSampler(conf: ParentRatioSamplingConfig): Sampler {
const ratioSampler = new TraceIdRatioBasedSampler(conf.ratio)
if (typeof conf.acceptRemote === 'boolean' && !conf.acceptRemote) {
return new ParentBasedSampler({
root: ratioSampler,
remoteParentSampled: ratioSampler,
remoteParentNotSampled: ratioSampler,
})
} else {
return new ParentBasedSampler({ root: ratioSampler })
}
}
function parseConfig(supplied: TraceConfig): ResolvedTraceConfig {
const headSampleConf = supplied.sampling?.headSampler
const headSampler = headSampleConf
? isSampler(headSampleConf)
? headSampleConf
: createSampler(headSampleConf)
: new AlwaysOnSampler()
return {
exporter: isSpanExporter(supplied.exporter) ? supplied.exporter : new OTLPExporter(supplied.exporter),
fetch: {
| includeTraceContext: supplied.fetch?.includeTraceContext ?? true,
},
handlers: { |
fetch: {
acceptTraceContext: supplied.handlers?.fetch?.acceptTraceContext ?? true,
},
},
postProcessor: supplied.postProcessor || ((spans: ReadableSpan[]) => spans),
sampling: {
headSampler,
tailSampler: supplied.sampling?.tailSampler || multiTailSampler([isHeadSampled, isRootErrorSpan]),
},
service: supplied.service,
}
}
function createInitialiser(config: ConfigurationOption): Initialiser {
if (typeof config === 'function') {
return (env, trigger) => {
const conf = parseConfig(config(env, trigger))
init(conf)
return conf
}
} else {
return () => {
const conf = parseConfig(config)
init(conf)
return conf
}
}
}
export function instrument<E, Q, C>(
handler: ExportedHandler<E, Q, C>,
config: ConfigurationOption
): ExportedHandler<E, Q, C> {
const initialiser = createInitialiser(config)
if (handler.fetch) {
const fetcher = unwrap(handler.fetch) as FetchHandler
handler.fetch = createFetchHandler(fetcher, initialiser)
}
if (handler.queue) {
const queuer = unwrap(handler.queue) as QueueHandler
handler.queue = createQueueHandler(queuer, initialiser)
}
return handler
}
export function instrumentDO(doClass: DOClass, config: ConfigurationOption) {
const initialiser = createInitialiser(config)
return instrumentDOClass(doClass, initialiser)
}
export { waitUntilTrace } from './instrumentation/fetch.js'
| src/sdk.ts | evanderkoogh-otel-cf-workers-18fd741 | [
{
"filename": "src/types.ts",
"retrieved_chunk": "\theadSampler?: HS\n\ttailSampler?: TailSampleFn\n}\nexport interface TraceConfig<EC extends ExporterConfig = ExporterConfig> {\n\texporter: EC\n\thandlers?: HandlerConfig\n\tfetch?: FetcherConfig\n\tpostProcessor?: PostProcessorFn\n\tsampling?: SamplingConfig\n\tservice: ServiceConfig",
"score": 24.75700214546619
},
{
"filename": "src/types.ts",
"retrieved_chunk": "}\nexport interface ResolvedTraceConfig extends TraceConfig {\n\texporter: SpanExporter\n\thandlers: Required<HandlerConfig>\n\tfetch: Required<FetcherConfig>\n\tpostProcessor: PostProcessorFn\n\tsampling: Required<SamplingConfig<Sampler>>\n}\nexport interface DOConstructorTrigger {\n\tid: string",
"score": 24.38134864083424
},
{
"filename": "src/instrumentation/fetch.ts",
"retrieved_chunk": "\t\t\tconst options: SpanOptions = { kind: SpanKind.CLIENT, attributes: attrs }\n\t\t\tconst host = new URL(request.url).host\n\t\t\tconst spanName = typeof attrs?.['name'] === 'string' ? attrs?.['name'] : `fetch: ${host}`\n\t\t\tconst promise = tracer.startActiveSpan(spanName, options, async (span) => {\n\t\t\t\tconst includeTraceContext =\n\t\t\t\t\ttypeof config.includeTraceContext === 'function'\n\t\t\t\t\t\t? config.includeTraceContext(request)\n\t\t\t\t\t\t: config.includeTraceContext\n\t\t\t\tif (includeTraceContext ?? true) {\n\t\t\t\t\tpropagation.inject(api_context.active(), request.headers, {",
"score": 23.05446176759674
},
{
"filename": "src/spanprocessor.ts",
"retrieved_chunk": "}\nfunction startExport(currentState: TraceCompleteState, { args }: StartExportAction): ExportingState | DoneState {\n\tconst { exporter, tailSampler, postProcessor } = args\n\tconst { traceId, localRootSpan, completedSpans: spans } = currentState\n\tconst shouldExport = tailSampler({ traceId, localRootSpan, spans })\n\tif (shouldExport) {\n\t\tconst exportSpans = postProcessor(spans)\n\t\tconst promise = new Promise<ExportResult>((resolve) => {\n\t\t\texporter.export(exportSpans, resolve)\n\t\t})",
"score": 19.932275497772356
},
{
"filename": "src/spanprocessor.ts",
"retrieved_chunk": "\t\t\tthis.traces.set(traceId, newState)\n\t\t}\n\t\treturn newState\n\t}\n\tprivate export(traceId: string) {\n\t\tconst { exporter, sampling, postProcessor } = getActiveConfig()\n\t\tconst exportArgs = { exporter, tailSampler: sampling.tailSampler, postProcessor }\n\t\tconst newState = this.action(traceId, { actionName: 'startExport', args: exportArgs })\n\t\tif (newState.stateName === 'exporting') {\n\t\t\tconst promise = newState.promise",
"score": 17.460588465151787
}
] | typescript | includeTraceContext: supplied.fetch?.includeTraceContext ?? true,
},
handlers: { |
import { propagation } from '@opentelemetry/api'
import { W3CTraceContextPropagator } from '@opentelemetry/core'
import { Resource } from '@opentelemetry/resources'
import { SemanticResourceAttributes } from '@opentelemetry/semantic-conventions'
import {
AlwaysOnSampler,
ParentBasedSampler,
ReadableSpan,
Sampler,
SpanExporter,
TraceIdRatioBasedSampler,
} from '@opentelemetry/sdk-trace-base'
import { Initialiser } from './config.js'
import { OTLPExporter } from './exporter.js'
import { WorkerTracerProvider } from './provider.js'
import { isHeadSampled, isRootErrorSpan, multiTailSampler } from './sampling.js'
import { BatchTraceSpanProcessor } from './spanprocessor.js'
import { Trigger, TraceConfig, ResolvedTraceConfig, ExporterConfig, ParentRatioSamplingConfig } from './types.js'
import { unwrap } from './wrap.js'
import { createFetchHandler, instrumentGlobalFetch } from './instrumentation/fetch.js'
import { instrumentGlobalCache } from './instrumentation/cache.js'
import { createQueueHandler } from './instrumentation/queue.js'
import { DOClass, instrumentDOClass } from './instrumentation/do.js'
type FetchHandler = ExportedHandlerFetchHandler<unknown, unknown>
type QueueHandler = ExportedHandlerQueueHandler
export type ResolveConfigFn = (env: any, trigger: Trigger) => TraceConfig
export type ConfigurationOption = TraceConfig | ResolveConfigFn
export function isRequest(trigger: Trigger): trigger is Request {
return trigger instanceof Request
}
export function isMessageBatch(trigger: Trigger): trigger is MessageBatch {
return !!(trigger as MessageBatch).ackAll
}
export function isAlarm(trigger: Trigger): trigger is 'do-alarm' {
return trigger === 'do-alarm'
}
const createResource = (config: ResolvedTraceConfig): Resource => {
const workerResourceAttrs = {
[SemanticResourceAttributes.CLOUD_PROVIDER]: 'cloudflare',
[SemanticResourceAttributes.CLOUD_PLATFORM]: 'cloudflare.workers',
[SemanticResourceAttributes.CLOUD_REGION]: 'earth',
// [SemanticResourceAttributes.FAAS_NAME]: '//TODO',
// [SemanticResourceAttributes.FAAS_VERSION]: '//TODO',
[SemanticResourceAttributes.FAAS_MAX_MEMORY]: 128,
[SemanticResourceAttributes.TELEMETRY_SDK_LANGUAGE]: 'JavaScript',
[SemanticResourceAttributes.TELEMETRY_SDK_NAME]: '@microlabs/otel-workers-sdk',
}
const serviceResource = new Resource({
[SemanticResourceAttributes.SERVICE_NAME]: config.service.name,
[SemanticResourceAttributes.SERVICE_NAMESPACE]: config.service.namespace,
[SemanticResourceAttributes.SERVICE_VERSION]: config.service.version,
})
const resource = new Resource(workerResourceAttrs)
return resource.merge(serviceResource)
}
function isSpanExporter(exporterConfig: ExporterConfig): exporterConfig is SpanExporter {
return !!(exporterConfig as SpanExporter).export
}
let initialised = false
function init(config: ResolvedTraceConfig): void {
if (!initialised) {
instrumentGlobalCache()
instrumentGlobalFetch()
propagation.setGlobalPropagator(new W3CTraceContextPropagator())
const resource = createResource(config)
const spanProcessor = new BatchTraceSpanProcessor()
const provider = new WorkerTracerProvider(spanProcessor, resource)
provider.register()
initialised = true
}
}
function isSampler(sampler: Sampler | ParentRatioSamplingConfig): sampler is Sampler {
return !!(sampler as Sampler).shouldSample
}
function createSampler(conf: ParentRatioSamplingConfig): Sampler {
const ratioSampler = new TraceIdRatioBasedSampler(conf.ratio)
if (typeof conf.acceptRemote === 'boolean' && !conf.acceptRemote) {
return new ParentBasedSampler({
root: ratioSampler,
remoteParentSampled: ratioSampler,
remoteParentNotSampled: ratioSampler,
})
} else {
return new ParentBasedSampler({ root: ratioSampler })
}
}
function parseConfig(supplied: TraceConfig): ResolvedTraceConfig {
const headSampleConf = supplied.sampling?.headSampler
const headSampler = headSampleConf
? isSampler(headSampleConf)
? headSampleConf
: createSampler(headSampleConf)
: new AlwaysOnSampler()
return {
exporter: isSpanExporter(supplied.exporter) ? supplied.exporter : new OTLPExporter(supplied.exporter),
fetch: {
includeTraceContext: supplied.fetch?.includeTraceContext ?? true,
},
handlers: {
fetch: {
acceptTraceContext: supplied.handlers?.fetch?.acceptTraceContext ?? true,
},
},
postProcessor: supplied.postProcessor || ((spans: ReadableSpan[]) => spans),
sampling: {
headSampler,
tailSampler: supplied.sampling?.tailSampler || multiTailSampler([isHeadSampled, isRootErrorSpan]),
},
service: supplied.service,
}
}
function createInitialiser(config: ConfigurationOption): Initialiser {
if (typeof config === 'function') {
| return (env, trigger) => { |
const conf = parseConfig(config(env, trigger))
init(conf)
return conf
}
} else {
return () => {
const conf = parseConfig(config)
init(conf)
return conf
}
}
}
export function instrument<E, Q, C>(
handler: ExportedHandler<E, Q, C>,
config: ConfigurationOption
): ExportedHandler<E, Q, C> {
const initialiser = createInitialiser(config)
if (handler.fetch) {
const fetcher = unwrap(handler.fetch) as FetchHandler
handler.fetch = createFetchHandler(fetcher, initialiser)
}
if (handler.queue) {
const queuer = unwrap(handler.queue) as QueueHandler
handler.queue = createQueueHandler(queuer, initialiser)
}
return handler
}
export function instrumentDO(doClass: DOClass, config: ConfigurationOption) {
const initialiser = createInitialiser(config)
return instrumentDOClass(doClass, initialiser)
}
export { waitUntilTrace } from './instrumentation/fetch.js'
| src/sdk.ts | evanderkoogh-otel-cf-workers-18fd741 | [
{
"filename": "src/types.ts",
"retrieved_chunk": "\theadSampler?: HS\n\ttailSampler?: TailSampleFn\n}\nexport interface TraceConfig<EC extends ExporterConfig = ExporterConfig> {\n\texporter: EC\n\thandlers?: HandlerConfig\n\tfetch?: FetcherConfig\n\tpostProcessor?: PostProcessorFn\n\tsampling?: SamplingConfig\n\tservice: ServiceConfig",
"score": 32.507361259550144
},
{
"filename": "src/spanprocessor.ts",
"retrieved_chunk": "\t\t\tthis.traces.set(traceId, newState)\n\t\t}\n\t\treturn newState\n\t}\n\tprivate export(traceId: string) {\n\t\tconst { exporter, sampling, postProcessor } = getActiveConfig()\n\t\tconst exportArgs = { exporter, tailSampler: sampling.tailSampler, postProcessor }\n\t\tconst newState = this.action(traceId, { actionName: 'startExport', args: exportArgs })\n\t\tif (newState.stateName === 'exporting') {\n\t\t\tconst promise = newState.promise",
"score": 20.24101414588409
},
{
"filename": "src/config.ts",
"retrieved_chunk": "import { context } from '@opentelemetry/api'\nimport { ResolvedTraceConfig, Trigger } from './types.js'\nconst configSymbol = Symbol('Otel Workers Tracing Configuration')\nexport type Initialiser = (env: Record<string, unknown>, trigger: Trigger) => ResolvedTraceConfig\nexport function setConfig(config: ResolvedTraceConfig, ctx = context.active()) {\n\treturn ctx.setValue(configSymbol, config)\n}\nexport function getActiveConfig(): ResolvedTraceConfig {\n\tconst config = context.active().getValue(configSymbol) as ResolvedTraceConfig\n\treturn config",
"score": 17.362506022347596
},
{
"filename": "src/spanprocessor.ts",
"retrieved_chunk": "}\nfunction startExport(currentState: TraceCompleteState, { args }: StartExportAction): ExportingState | DoneState {\n\tconst { exporter, tailSampler, postProcessor } = args\n\tconst { traceId, localRootSpan, completedSpans: spans } = currentState\n\tconst shouldExport = tailSampler({ traceId, localRootSpan, spans })\n\tif (shouldExport) {\n\t\tconst exportSpans = postProcessor(spans)\n\t\tconst promise = new Promise<ExportResult>((resolve) => {\n\t\t\texporter.export(exportSpans, resolve)\n\t\t})",
"score": 11.852465297256208
},
{
"filename": "src/instrumentation/fetch.ts",
"retrieved_chunk": "\t\t\treturn promise\n\t\t},\n\t}\n\treturn wrap(fetchFn, handler, true)\n}\nexport function instrumentGlobalFetch(): void {\n\tglobalThis.fetch = instrumentFetcher(globalThis.fetch, (config) => config.fetch)\n}",
"score": 11.212282500291755
}
] | typescript | return (env, trigger) => { |
import { propagation } from '@opentelemetry/api'
import { W3CTraceContextPropagator } from '@opentelemetry/core'
import { Resource } from '@opentelemetry/resources'
import { SemanticResourceAttributes } from '@opentelemetry/semantic-conventions'
import {
AlwaysOnSampler,
ParentBasedSampler,
ReadableSpan,
Sampler,
SpanExporter,
TraceIdRatioBasedSampler,
} from '@opentelemetry/sdk-trace-base'
import { Initialiser } from './config.js'
import { OTLPExporter } from './exporter.js'
import { WorkerTracerProvider } from './provider.js'
import { isHeadSampled, isRootErrorSpan, multiTailSampler } from './sampling.js'
import { BatchTraceSpanProcessor } from './spanprocessor.js'
import { Trigger, TraceConfig, ResolvedTraceConfig, ExporterConfig, ParentRatioSamplingConfig } from './types.js'
import { unwrap } from './wrap.js'
import { createFetchHandler, instrumentGlobalFetch } from './instrumentation/fetch.js'
import { instrumentGlobalCache } from './instrumentation/cache.js'
import { createQueueHandler } from './instrumentation/queue.js'
import { DOClass, instrumentDOClass } from './instrumentation/do.js'
type FetchHandler = ExportedHandlerFetchHandler<unknown, unknown>
type QueueHandler = ExportedHandlerQueueHandler
export type ResolveConfigFn = (env: any, trigger: Trigger) => TraceConfig
export type ConfigurationOption = TraceConfig | ResolveConfigFn
export function isRequest(trigger: Trigger): trigger is Request {
return trigger instanceof Request
}
export function isMessageBatch(trigger: Trigger): trigger is MessageBatch {
return !!(trigger as MessageBatch).ackAll
}
export function isAlarm(trigger: Trigger): trigger is 'do-alarm' {
return trigger === 'do-alarm'
}
const createResource = (config: ResolvedTraceConfig): Resource => {
const workerResourceAttrs = {
[SemanticResourceAttributes.CLOUD_PROVIDER]: 'cloudflare',
[SemanticResourceAttributes.CLOUD_PLATFORM]: 'cloudflare.workers',
[SemanticResourceAttributes.CLOUD_REGION]: 'earth',
// [SemanticResourceAttributes.FAAS_NAME]: '//TODO',
// [SemanticResourceAttributes.FAAS_VERSION]: '//TODO',
[SemanticResourceAttributes.FAAS_MAX_MEMORY]: 128,
[SemanticResourceAttributes.TELEMETRY_SDK_LANGUAGE]: 'JavaScript',
[SemanticResourceAttributes.TELEMETRY_SDK_NAME]: '@microlabs/otel-workers-sdk',
}
const serviceResource = new Resource({
[SemanticResourceAttributes.SERVICE_NAME]: config.service.name,
[SemanticResourceAttributes.SERVICE_NAMESPACE]: config.service.namespace,
[SemanticResourceAttributes.SERVICE_VERSION]: config.service.version,
})
const resource = new Resource(workerResourceAttrs)
return resource.merge(serviceResource)
}
function isSpanExporter(exporterConfig: ExporterConfig): exporterConfig is SpanExporter {
return !!(exporterConfig as SpanExporter).export
}
let initialised = false
function init(config: ResolvedTraceConfig): void {
if (!initialised) {
instrumentGlobalCache()
instrumentGlobalFetch()
propagation.setGlobalPropagator(new W3CTraceContextPropagator())
const resource = createResource(config)
const spanProcessor = new BatchTraceSpanProcessor()
const provider = new WorkerTracerProvider(spanProcessor, resource)
provider.register()
initialised = true
}
}
function isSampler(sampler: Sampler | ParentRatioSamplingConfig): sampler is Sampler {
return !!(sampler as Sampler).shouldSample
}
function createSampler(conf: ParentRatioSamplingConfig): Sampler {
const ratioSampler = new TraceIdRatioBasedSampler(conf.ratio)
if (typeof conf.acceptRemote === 'boolean' && !conf.acceptRemote) {
return new ParentBasedSampler({
root: ratioSampler,
remoteParentSampled: ratioSampler,
remoteParentNotSampled: ratioSampler,
})
} else {
return new ParentBasedSampler({ root: ratioSampler })
}
}
function parseConfig(supplied: TraceConfig): ResolvedTraceConfig {
const headSampleConf = supplied.sampling?.headSampler
const headSampler = headSampleConf
? isSampler(headSampleConf)
? headSampleConf
: createSampler(headSampleConf)
: new AlwaysOnSampler()
return {
exporter: isSpanExporter(supplied.exporter) ? supplied.exporter : new OTLPExporter(supplied.exporter),
fetch: {
includeTraceContext: supplied.fetch?.includeTraceContext ?? true,
},
handlers: {
fetch: {
acceptTraceContext: supplied.handlers?.fetch?.acceptTraceContext ?? true,
},
},
postProcessor: supplied.postProcessor || ((spans: ReadableSpan[]) => spans),
sampling: {
headSampler,
tailSampler: supplied.sampling?.tailSampler || multiTailSampler([isHeadSampled, isRootErrorSpan]),
},
| service: supplied.service,
} |
}
function createInitialiser(config: ConfigurationOption): Initialiser {
if (typeof config === 'function') {
return (env, trigger) => {
const conf = parseConfig(config(env, trigger))
init(conf)
return conf
}
} else {
return () => {
const conf = parseConfig(config)
init(conf)
return conf
}
}
}
export function instrument<E, Q, C>(
handler: ExportedHandler<E, Q, C>,
config: ConfigurationOption
): ExportedHandler<E, Q, C> {
const initialiser = createInitialiser(config)
if (handler.fetch) {
const fetcher = unwrap(handler.fetch) as FetchHandler
handler.fetch = createFetchHandler(fetcher, initialiser)
}
if (handler.queue) {
const queuer = unwrap(handler.queue) as QueueHandler
handler.queue = createQueueHandler(queuer, initialiser)
}
return handler
}
export function instrumentDO(doClass: DOClass, config: ConfigurationOption) {
const initialiser = createInitialiser(config)
return instrumentDOClass(doClass, initialiser)
}
export { waitUntilTrace } from './instrumentation/fetch.js'
| src/sdk.ts | evanderkoogh-otel-cf-workers-18fd741 | [
{
"filename": "src/types.ts",
"retrieved_chunk": "\theadSampler?: HS\n\ttailSampler?: TailSampleFn\n}\nexport interface TraceConfig<EC extends ExporterConfig = ExporterConfig> {\n\texporter: EC\n\thandlers?: HandlerConfig\n\tfetch?: FetcherConfig\n\tpostProcessor?: PostProcessorFn\n\tsampling?: SamplingConfig\n\tservice: ServiceConfig",
"score": 48.27095193466855
},
{
"filename": "src/spanprocessor.ts",
"retrieved_chunk": "}\nfunction startExport(currentState: TraceCompleteState, { args }: StartExportAction): ExportingState | DoneState {\n\tconst { exporter, tailSampler, postProcessor } = args\n\tconst { traceId, localRootSpan, completedSpans: spans } = currentState\n\tconst shouldExport = tailSampler({ traceId, localRootSpan, spans })\n\tif (shouldExport) {\n\t\tconst exportSpans = postProcessor(spans)\n\t\tconst promise = new Promise<ExportResult>((resolve) => {\n\t\t\texporter.export(exportSpans, resolve)\n\t\t})",
"score": 30.461828898463366
},
{
"filename": "src/spanprocessor.ts",
"retrieved_chunk": "\t\t\tthis.traces.set(traceId, newState)\n\t\t}\n\t\treturn newState\n\t}\n\tprivate export(traceId: string) {\n\t\tconst { exporter, sampling, postProcessor } = getActiveConfig()\n\t\tconst exportArgs = { exporter, tailSampler: sampling.tailSampler, postProcessor }\n\t\tconst newState = this.action(traceId, { actionName: 'startExport', args: exportArgs })\n\t\tif (newState.stateName === 'exporting') {\n\t\t\tconst promise = newState.promise",
"score": 28.133884215138043
},
{
"filename": "src/instrumentation/fetch.ts",
"retrieved_chunk": "\tconst workerConfig = getActiveConfig()\n\tconst acceptTraceContext =\n\t\ttypeof workerConfig.handlers.fetch.acceptTraceContext === 'function'\n\t\t\t? workerConfig.handlers.fetch.acceptTraceContext(request)\n\t\t\t: workerConfig.handlers.fetch.acceptTraceContext ?? true\n\treturn acceptTraceContext ? getParentContextFromHeaders(request.headers) : api_context.active()\n}\nexport function waitUntilTrace(fn: () => Promise<any>): Promise<void> {\n\tconst tracer = trace.getTracer('waitUntil')\n\treturn tracer.startActiveSpan('waitUntil', async (span) => {",
"score": 27.550179839867443
},
{
"filename": "src/types.ts",
"retrieved_chunk": "}\nexport interface ResolvedTraceConfig extends TraceConfig {\n\texporter: SpanExporter\n\thandlers: Required<HandlerConfig>\n\tfetch: Required<FetcherConfig>\n\tpostProcessor: PostProcessorFn\n\tsampling: Required<SamplingConfig<Sampler>>\n}\nexport interface DOConstructorTrigger {\n\tid: string",
"score": 22.8660097424778
}
] | typescript | service: supplied.service,
} |
import { propagation } from '@opentelemetry/api'
import { W3CTraceContextPropagator } from '@opentelemetry/core'
import { Resource } from '@opentelemetry/resources'
import { SemanticResourceAttributes } from '@opentelemetry/semantic-conventions'
import {
AlwaysOnSampler,
ParentBasedSampler,
ReadableSpan,
Sampler,
SpanExporter,
TraceIdRatioBasedSampler,
} from '@opentelemetry/sdk-trace-base'
import { Initialiser } from './config.js'
import { OTLPExporter } from './exporter.js'
import { WorkerTracerProvider } from './provider.js'
import { isHeadSampled, isRootErrorSpan, multiTailSampler } from './sampling.js'
import { BatchTraceSpanProcessor } from './spanprocessor.js'
import { Trigger, TraceConfig, ResolvedTraceConfig, ExporterConfig, ParentRatioSamplingConfig } from './types.js'
import { unwrap } from './wrap.js'
import { createFetchHandler, instrumentGlobalFetch } from './instrumentation/fetch.js'
import { instrumentGlobalCache } from './instrumentation/cache.js'
import { createQueueHandler } from './instrumentation/queue.js'
import { DOClass, instrumentDOClass } from './instrumentation/do.js'
type FetchHandler = ExportedHandlerFetchHandler<unknown, unknown>
type QueueHandler = ExportedHandlerQueueHandler
export type ResolveConfigFn = (env: any, trigger: Trigger) => TraceConfig
export type ConfigurationOption = TraceConfig | ResolveConfigFn
export function isRequest(trigger: Trigger): trigger is Request {
return trigger instanceof Request
}
export function isMessageBatch(trigger: Trigger): trigger is MessageBatch {
return !!(trigger as MessageBatch).ackAll
}
export function isAlarm(trigger: Trigger): trigger is 'do-alarm' {
return trigger === 'do-alarm'
}
const createResource = (config: ResolvedTraceConfig): Resource => {
const workerResourceAttrs = {
[SemanticResourceAttributes.CLOUD_PROVIDER]: 'cloudflare',
[SemanticResourceAttributes.CLOUD_PLATFORM]: 'cloudflare.workers',
[SemanticResourceAttributes.CLOUD_REGION]: 'earth',
// [SemanticResourceAttributes.FAAS_NAME]: '//TODO',
// [SemanticResourceAttributes.FAAS_VERSION]: '//TODO',
[SemanticResourceAttributes.FAAS_MAX_MEMORY]: 128,
[SemanticResourceAttributes.TELEMETRY_SDK_LANGUAGE]: 'JavaScript',
[SemanticResourceAttributes.TELEMETRY_SDK_NAME]: '@microlabs/otel-workers-sdk',
}
const serviceResource = new Resource({
[SemanticResourceAttributes.SERVICE_NAME]: config.service.name,
[SemanticResourceAttributes.SERVICE_NAMESPACE]: config.service.namespace,
[SemanticResourceAttributes.SERVICE_VERSION]: config.service.version,
})
const resource = new Resource(workerResourceAttrs)
return resource.merge(serviceResource)
}
function isSpanExporter(exporterConfig: ExporterConfig): exporterConfig is SpanExporter {
return !!(exporterConfig as SpanExporter).export
}
let initialised = false
function init(config: ResolvedTraceConfig): void {
if (!initialised) {
instrumentGlobalCache()
instrumentGlobalFetch()
propagation.setGlobalPropagator(new W3CTraceContextPropagator())
const resource = createResource(config)
const spanProcessor = new BatchTraceSpanProcessor()
const provider = new WorkerTracerProvider(spanProcessor, resource)
provider.register()
initialised = true
}
}
function isSampler(sampler: Sampler | ParentRatioSamplingConfig): sampler is Sampler {
return !!(sampler as Sampler).shouldSample
}
function createSampler(conf: ParentRatioSamplingConfig): Sampler {
const ratioSampler = new TraceIdRatioBasedSampler(conf.ratio)
if (typeof conf.acceptRemote === 'boolean' && !conf.acceptRemote) {
return new ParentBasedSampler({
root: ratioSampler,
remoteParentSampled: ratioSampler,
remoteParentNotSampled: ratioSampler,
})
} else {
return new ParentBasedSampler({ root: ratioSampler })
}
}
function parseConfig(supplied: TraceConfig): ResolvedTraceConfig {
const headSampleConf = supplied.sampling?.headSampler
const headSampler = headSampleConf
? isSampler(headSampleConf)
? headSampleConf
: createSampler(headSampleConf)
: new AlwaysOnSampler()
return {
exporter: isSpanExporter(supplied.exporter) ? supplied.exporter : new OTLPExporter(supplied.exporter),
fetch: {
includeTraceContext: supplied.fetch?.includeTraceContext ?? true,
},
handlers: {
fetch: {
acceptTraceContext: supplied.handlers?.fetch?.acceptTraceContext ?? true,
},
},
postProcessor: supplied.postProcessor || ((spans: ReadableSpan[]) => spans),
sampling: {
headSampler,
tailSampler: supplied.sampling?.tailSampler || multiTailSampler([isHeadSampled, isRootErrorSpan]),
},
service: supplied.service,
}
}
function createInitialiser(config: ConfigurationOption): Initialiser {
if (typeof config === 'function') {
return (env, trigger) => {
const conf = parseConfig(config(env, trigger))
init(conf)
return conf
}
} else {
return () => {
const conf = parseConfig(config)
init(conf)
return conf
}
}
}
export function instrument<E, Q, C>(
handler: ExportedHandler<E, Q, C>,
config: ConfigurationOption
): ExportedHandler<E, Q, C> {
const initialiser = createInitialiser(config)
if (handler.fetch) {
const fetcher | = unwrap(handler.fetch) as FetchHandler
handler.fetch = createFetchHandler(fetcher, initialiser)
} |
if (handler.queue) {
const queuer = unwrap(handler.queue) as QueueHandler
handler.queue = createQueueHandler(queuer, initialiser)
}
return handler
}
export function instrumentDO(doClass: DOClass, config: ConfigurationOption) {
const initialiser = createInitialiser(config)
return instrumentDOClass(doClass, initialiser)
}
export { waitUntilTrace } from './instrumentation/fetch.js'
| src/sdk.ts | evanderkoogh-otel-cf-workers-18fd741 | [
{
"filename": "src/instrumentation/fetch.ts",
"retrieved_chunk": "\tfetchFn: Fetcher['fetch'],\n\tconfigFn: getFetchConfig,\n\tattrs?: Attributes\n): Fetcher['fetch'] {\n\tconst handler: ProxyHandler<typeof fetch> = {\n\t\tapply: (target, thisArg, argArray): ReturnType<typeof fetch> => {\n\t\t\tconst workerConfig = getActiveConfig()\n\t\t\tconst config = configFn(workerConfig)\n\t\t\tconst request = new Request(argArray[0], argArray[1])\n\t\t\tconst tracer = trace.getTracer('fetcher')",
"score": 37.323182266412594
},
{
"filename": "src/instrumentation/fetch.ts",
"retrieved_chunk": "\t\t\treturn promise\n\t\t},\n\t}\n\treturn wrap(fetchFn, handler, true)\n}\nexport function instrumentGlobalFetch(): void {\n\tglobalThis.fetch = instrumentFetcher(globalThis.fetch, (config) => config.fetch)\n}",
"score": 35.12389500586985
},
{
"filename": "src/instrumentation/queue.ts",
"retrieved_chunk": "\t\t\t}\n\t\t},\n\t}\n\treturn wrap(msg, msgHandler)\n}\nconst proxyMessageBatch = <E, Q>(batch: MessageBatch, count: MessageStatusCount) => {\n\tconst batchHandler: ProxyHandler<MessageBatch> = {\n\t\tget: (target, prop) => {\n\t\t\tif (prop === 'messages') {\n\t\t\t\tconst messages = Reflect.get(target, prop)",
"score": 34.12796351636708
},
{
"filename": "src/instrumentation/queue.ts",
"retrieved_chunk": "\t\tattrs['queue.message_timestamp'] = msg.timestamp.toISOString()\n\t}\n\ttrace.getActiveSpan()?.addEvent(name, attrs)\n}\nconst proxyQueueMessage = <Q>(msg: Message<Q>, count: MessageStatusCount): Message<Q> => {\n\tconst msgHandler: ProxyHandler<Message<Q>> = {\n\t\tget: (target, prop) => {\n\t\t\tif (prop === 'ack') {\n\t\t\t\tconst ackFn = Reflect.get(target, prop)\n\t\t\t\treturn new Proxy(ackFn, {",
"score": 26.27163240668391
},
{
"filename": "src/instrumentation/do.ts",
"retrieved_chunk": "\t\tget(target, prop) {\n\t\t\tif (prop === 'fetch') {\n\t\t\t\tconst fetcher = Reflect.get(target, prop)\n\t\t\t\tconst attrs = {\n\t\t\t\t\tname: `durable_object:${nsName}`,\n\t\t\t\t\t'do.namespace': nsName,\n\t\t\t\t\t'do.id': target.id.toString(),\n\t\t\t\t\t'do.id.name': target.id.name,\n\t\t\t\t}\n\t\t\t\treturn instrumentFetcher(fetcher, () => ({ includeTraceContext: true }), attrs)",
"score": 22.662728401393284
}
] | typescript | = unwrap(handler.fetch) as FetchHandler
handler.fetch = createFetchHandler(fetcher, initialiser)
} |
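
For orientation, the instrument() export above is the piece applications actually import: it wraps the fetch and queue handlers of a Worker's default export via createFetchHandler/createQueueHandler. The snippet below is an illustrative sketch only, not code from the repository; the relative import path, the exporter url/headers shape, and the service name are assumptions.

import { instrument, ResolveConfigFn } from './sdk.js'

const handler: ExportedHandler = {
	async fetch(_request, _env, _ctx) {
		return new Response('hello')
	},
}

// Hypothetical resolver returning the TraceConfig shape that parseConfig() above consumes.
const config: ResolveConfigFn = (_env, _trigger) => ({
	exporter: { url: 'https://collector.example.invalid/v1/traces', headers: {} },
	service: { name: 'example-worker' },
})

export default instrument(handler, config)
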
import { propagation } from '@opentelemetry/api'
import { W3CTraceContextPropagator } from '@opentelemetry/core'
import { Resource } from '@opentelemetry/resources'
import { SemanticResourceAttributes } from '@opentelemetry/semantic-conventions'
import {
AlwaysOnSampler,
ParentBasedSampler,
ReadableSpan,
Sampler,
SpanExporter,
TraceIdRatioBasedSampler,
} from '@opentelemetry/sdk-trace-base'
import { Initialiser } from './config.js'
import { OTLPExporter } from './exporter.js'
import { WorkerTracerProvider } from './provider.js'
import { isHeadSampled, isRootErrorSpan, multiTailSampler } from './sampling.js'
import { BatchTraceSpanProcessor } from './spanprocessor.js'
import { Trigger, TraceConfig, ResolvedTraceConfig, ExporterConfig, ParentRatioSamplingConfig } from './types.js'
import { unwrap } from './wrap.js'
import { createFetchHandler, instrumentGlobalFetch } from './instrumentation/fetch.js'
import { instrumentGlobalCache } from './instrumentation/cache.js'
import { createQueueHandler } from './instrumentation/queue.js'
import { DOClass, instrumentDOClass } from './instrumentation/do.js'
type FetchHandler = ExportedHandlerFetchHandler<unknown, unknown>
type QueueHandler = ExportedHandlerQueueHandler
export type ResolveConfigFn = (env: any, trigger: Trigger) => TraceConfig
export type ConfigurationOption = TraceConfig | ResolveConfigFn
export function isRequest(trigger: Trigger): trigger is Request {
return trigger instanceof Request
}
export function isMessageBatch(trigger: Trigger): trigger is MessageBatch {
return !!(trigger as MessageBatch).ackAll
}
export function isAlarm(trigger: Trigger): trigger is 'do-alarm' {
return trigger === 'do-alarm'
}
const createResource = (config: ResolvedTraceConfig): Resource => {
const workerResourceAttrs = {
[SemanticResourceAttributes.CLOUD_PROVIDER]: 'cloudflare',
[SemanticResourceAttributes.CLOUD_PLATFORM]: 'cloudflare.workers',
[SemanticResourceAttributes.CLOUD_REGION]: 'earth',
// [SemanticResourceAttributes.FAAS_NAME]: '//TODO',
// [SemanticResourceAttributes.FAAS_VERSION]: '//TODO',
[SemanticResourceAttributes.FAAS_MAX_MEMORY]: 128,
[SemanticResourceAttributes.TELEMETRY_SDK_LANGUAGE]: 'JavaScript',
[SemanticResourceAttributes.TELEMETRY_SDK_NAME]: '@microlabs/otel-workers-sdk',
}
const serviceResource = new Resource({
[SemanticResourceAttributes.SERVICE_NAME]: config.service.name,
[SemanticResourceAttributes.SERVICE_NAMESPACE]: config.service.namespace,
[SemanticResourceAttributes.SERVICE_VERSION]: config.service.version,
})
const resource = new Resource(workerResourceAttrs)
return resource.merge(serviceResource)
}
function isSpanExporter(exporterConfig: ExporterConfig): exporterConfig is SpanExporter {
return !!(exporterConfig as SpanExporter).export
}
let initialised = false
function init(config: ResolvedTraceConfig): void {
if (!initialised) {
instrumentGlobalCache()
instrumentGlobalFetch()
propagation.setGlobalPropagator(new W3CTraceContextPropagator())
const resource = createResource(config)
const spanProcessor = new BatchTraceSpanProcessor()
const provider = new WorkerTracerProvider(spanProcessor, resource)
provider.register()
initialised = true
}
}
function isSampler(sampler: Sampler | ParentRatioSamplingConfig): sampler is Sampler {
return !!(sampler as Sampler).shouldSample
}
function createSampler(conf: ParentRatioSamplingConfig): Sampler {
const ratioSampler = new TraceIdRatioBasedSampler(conf.ratio)
if (typeof conf.acceptRemote === 'boolean' && !conf.acceptRemote) {
return new ParentBasedSampler({
root: ratioSampler,
remoteParentSampled: ratioSampler,
remoteParentNotSampled: ratioSampler,
})
} else {
return new ParentBasedSampler({ root: ratioSampler })
}
}
function parseConfig(supplied: TraceConfig): ResolvedTraceConfig {
const headSampleConf = supplied.sampling?.headSampler
const headSampler = headSampleConf
? isSampler(headSampleConf)
? headSampleConf
: createSampler(headSampleConf)
: new AlwaysOnSampler()
return {
exporter: isSpanExporter(supplied.exporter) ? supplied.exporter : new OTLPExporter(supplied.exporter),
fetch: {
includeTraceContext: supplied.fetch?.includeTraceContext ?? true,
},
handlers: {
fetch: {
acceptTraceContext: supplied.handlers?.fetch?.acceptTraceContext ?? true,
},
},
postProcessor: supplied.postProcessor || ((spans: ReadableSpan[]) => spans),
sampling: {
headSampler,
tailSampler: supplied.sampling?.tailSampler || multiTailSampler([isHeadSampled, isRootErrorSpan]),
},
service: supplied.service,
}
}
function createInitialiser(config: ConfigurationOption): Initialiser {
if (typeof config === 'function') {
return (env, trigger) => {
const conf = parseConfig(config(env, trigger))
init(conf)
return conf
}
} else {
return () => {
const conf = parseConfig(config)
init(conf)
return conf
}
}
}
export function instrument<E, Q, C>(
handler: ExportedHandler<E, Q, C>,
config: ConfigurationOption
): ExportedHandler<E, Q, C> {
const initialiser = createInitialiser(config)
if (handler.fetch) {
const fetcher = unwrap(handler.fetch) as FetchHandler
handler.fetch = createFetchHandler(fetcher, initialiser)
}
if (handler.queue) {
const queuer = unwrap(handler.queue) as QueueHandler
handler.queue = createQueueHandler(queuer, initialiser)
}
return handler
}
export | function instrumentDO(doClass: DOClass, config: ConfigurationOption) { |
const initialiser = createInitialiser(config)
return instrumentDOClass(doClass, initialiser)
}
export { waitUntilTrace } from './instrumentation/fetch.js'
| src/sdk.ts | evanderkoogh-otel-cf-workers-18fd741 | [
{
"filename": "src/instrumentation/fetch.ts",
"retrieved_chunk": "\t\t\treturn promise\n\t\t},\n\t}\n\treturn wrap(fetchFn, handler, true)\n}\nexport function instrumentGlobalFetch(): void {\n\tglobalThis.fetch = instrumentFetcher(globalThis.fetch, (config) => config.fetch)\n}",
"score": 26.480075478692076
},
{
"filename": "src/instrumentation/queue.ts",
"retrieved_chunk": "\t\tapply: (target, thisArg, argArray) => {\n\t\t\treturn tracer.startActiveSpan(`queueSend: ${name}`, async (span) => {\n\t\t\t\tspan.setAttribute('queue.operation', 'send')\n\t\t\t\tawait Reflect.apply(target, unwrap(thisArg), argArray)\n\t\t\t\tspan.end()\n\t\t\t})\n\t\t},\n\t}\n\treturn wrap(fn, handler)\n}",
"score": 25.660898476044174
},
{
"filename": "src/wrap.ts",
"retrieved_chunk": "\t}\n\tproxyHandler.apply = (target, thisArg, argArray) => {\n\t\tif (handler.apply) {\n\t\t\treturn handler.apply(unwrap(target), unwrap(thisArg), argArray)\n\t\t}\n\t}\n\treturn new Proxy(item, proxyHandler)\n}\nexport function unwrap<T extends object>(item: T): T {\n\tif (item && isWrapped(item)) {",
"score": 24.748184736789838
},
{
"filename": "src/instrumentation/queue.ts",
"retrieved_chunk": "\t}\n\treturn wrap(fn, handler)\n}\nexport function instrumentQueueSender(queue: Queue<unknown>, name: string) {\n\tconst queueHandler: ProxyHandler<Queue<unknown>> = {\n\t\tget: (target, prop) => {\n\t\t\tif (prop === 'send') {\n\t\t\t\tconst sendFn = Reflect.get(target, prop)\n\t\t\t\treturn instrumentQueueSend(sendFn, name)\n\t\t\t} else if (prop === 'sendBatch') {",
"score": 24.51061981048974
},
{
"filename": "src/instrumentation/fetch.ts",
"retrieved_chunk": "\tfetchFn: Fetcher['fetch'],\n\tconfigFn: getFetchConfig,\n\tattrs?: Attributes\n): Fetcher['fetch'] {\n\tconst handler: ProxyHandler<typeof fetch> = {\n\t\tapply: (target, thisArg, argArray): ReturnType<typeof fetch> => {\n\t\t\tconst workerConfig = getActiveConfig()\n\t\t\tconst config = configFn(workerConfig)\n\t\t\tconst request = new Request(argArray[0], argArray[1])\n\t\t\tconst tracer = trace.getTracer('fetcher')",
"score": 22.75736832659408
}
] | typescript | function instrumentDO(doClass: DOClass, config: ConfigurationOption) { |
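
The instrumentDO export completed in this record does the same for Durable Objects: it wraps the class so that constructing and invoking the object happens under an initialised trace config. A minimal hedged sketch of applying it; the class, the binding name, and the config values are invented for illustration.

import { instrumentDO } from './sdk.js'

class CounterDO implements DurableObject {
	constructor(_state: DurableObjectState, _env: unknown) {}
	async fetch(_request: Request): Promise<Response> {
		return new Response('ok')
	}
}

// Same TraceConfig shape used with instrument(); the exporter url is a placeholder.
export const Counter = instrumentDO(CounterDO, {
	exporter: { url: 'https://collector.example.invalid/v1/traces' },
	service: { name: 'counter-do' },
})
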
import fs, { readFileSync, writeFileSync } from "fs";
import { argv } from "process";
import readline from "readline";
import events from "events";
import { InstructionSet, parseArchLine } from "./lib/bass";
import { parseNumber } from "./lib/util";
import * as path from "path";
import { AssembledProgram } from "./lib/types";
import { commentRegex, labelRegex } from "./lib/regex";
import { outputInstructions } from "./lib/opcodeOutput";
import { log } from "./lib/log";
import { readArch, readByLines } from "./lib/fs";
interface CommandEntry {
regex: RegExp;
action: (
line: { line: string; lineNumber: number },
matches: RegExpExecArray,
program: AssembledProgram
) => void;
}
// The commands supported by the assembler (separate from opcodes)
const commands: CommandEntry[] = [
{
regex: /origin\s+((?:0x)?[a-f0-9]+)/,
action: ({ lineNumber }, [_2, address], program) => {
if (address === undefined) {
log("Could not parse origin", lineNumber);
return;
}
program.currentAddress = parseNumber(address);
},
},
{
regex: /constant\s+(?:(0x[a-f0-9]+|[0-9]+)|([a-z0-9_]+))/,
action: ({ line, lineNumber }, [_, constant, label], program) => {
const address = program.currentAddress;
if (constant !== undefined) {
const value = parseNumber(constant);
if (value > 4095) {
log(
`Constant ${constant} is too large to fit into 12 bits`,
lineNumber
);
return;
}
program.matchedInstructions.push({
type: "constant",
subtype: "literal",
value,
line,
lineNumber,
address,
});
} else if (label !== undefined) {
program.matchedInstructions.push({
type: "constant",
subtype: "label",
label,
line,
lineNumber,
address,
});
} else {
log("Unknown constant error", lineNumber);
return;
}
program.currentAddress += 1;
},
},
];
const parseAsmLine = (
line: string,
lineNumber: number,
| instructionSet: InstructionSet,
program: AssembledProgram
) => { |
if (line.length == 0 || line.startsWith("//") || line.startsWith(";")) {
// Comment. Skip
return;
}
for (const command of commands) {
const matches = command.regex.exec(line);
if (!!matches && matches.length > 0) {
command.action({ lineNumber, line }, matches, program);
return;
}
}
let hasInstruction = false;
// Match line against all known instructions from the BASS arch
for (const instruction of instructionSet.instructions) {
const matches = instruction.regex.exec(line);
const address = program.currentAddress;
if (!!matches && matches.length > 0) {
if (matches[1] !== undefined) {
// immediate
if (instruction.type !== "immediate") {
log(
"Attempted to match content with non-immediate instruction",
lineNumber
);
return;
}
program.matchedInstructions.push({
type: "immediate",
line,
immediate: parseNumber(matches[1]),
opcodeString: instruction.opcodeString,
bitCount: instruction.immediate.bitCount,
lineNumber,
address,
});
} else if (matches[2] !== undefined) {
// potential label
if (instruction.type !== "immediate") {
log(
"Attempted to match content with non-immediate instruction",
lineNumber
);
return;
}
program.matchedInstructions.push({
type: "label",
line,
label: matches[2],
opcodeString: instruction.opcodeString,
bitCount: instruction.immediate.bitCount,
lineNumber,
address,
});
} else {
// literal only
program.matchedInstructions.push({
type: "literal",
line,
opcodeString: instruction.opcodeString,
lineNumber,
address,
});
}
hasInstruction = true;
program.currentAddress += 1;
break;
}
}
if (hasInstruction && program.unmatchedLabels.length > 0) {
// Add queued labels
for (const label of program.unmatchedLabels) {
const existingLabel = program.matchedLabels[label.label];
if (existingLabel) {
log(
`Label "${label.label}" already exists. Was created on line ${existingLabel.lineNumber}`,
lineNumber
);
return;
}
program.matchedLabels[label.label] = {
lineNumber,
instructionIndex: program.matchedInstructions.length - 1,
address: program.currentAddress - 1,
};
}
// We've processed all labels
program.unmatchedLabels = [];
}
let lineWithoutLabel = line;
const matches = labelRegex.exec(line);
if (!!matches && matches.length > 0 && matches[1]) {
lineWithoutLabel =
lineWithoutLabel.substring(0, matches.index) +
lineWithoutLabel.substring(matches.index + matches[0].length);
const label = matches[1];
const existingLabel = program.matchedLabels[label];
if (existingLabel) {
log(
`Label "${label}" already exists. Was created on line ${existingLabel.lineNumber}`,
lineNumber
);
return;
}
if (hasInstruction) {
// Instruction on this line, pair them up
program.matchedLabels[label] = {
lineNumber,
instructionIndex: program.matchedInstructions.length - 1,
address: program.currentAddress - 1,
};
} else {
// Will pair with some future instruction. Queue it
program.unmatchedLabels.push({
label,
lineNumber,
});
}
}
lineWithoutLabel = lineWithoutLabel.replace(commentRegex, "").trim();
if (!hasInstruction && lineWithoutLabel.length > 0) {
log(`Unknown instruction "${lineWithoutLabel}"`, lineNumber);
}
};
if (argv.length != 4 && argv.length != 5) {
console.log(`Received ${argv.length - 2} arguments. Expected 2-3\n`);
console.log(
"Usage: node assembler.js [input.asm] [output.bin] {true|false: 12 bit output}"
);
process.exit(1);
}
const archPath = path.join(__dirname, "../bass/6200.arch");
const inputFile = argv[2] as string;
const outputFile = argv[3] as string;
// Passing "true" as the third CLI argument selects packed 12-bit output; anything else keeps 16-bit word alignment.
const word16Align = argv[4] !== "true";
const build = async () => {
const program: AssembledProgram = {
currentAddress: 0,
matchedInstructions: [],
matchedLabels: {},
unmatchedLabels: [],
};
const instructionSet = await readArch(archPath);
await readByLines(inputFile, (line, lineNumber) =>
parseAsmLine(line, lineNumber, instructionSet, program)
);
const outputBuffer = outputInstructions(program, word16Align);
if (outputBuffer.type === "some") {
writeFileSync(outputFile, outputBuffer.value);
} else {
console.log("Could not generate output binary");
}
};
build();
| src/assembler.ts | agg23-tamagotchi-disassembled-421eacb | [
{
"filename": "src/lib/fs.ts",
"retrieved_chunk": " await readByLines(path, (line, lineNumber) =>\n parseArchLine(line, lineNumber, instructionSet)\n );\n return instructionSet;\n};",
"score": 12.092058362156138
},
{
"filename": "src/lib/opcodeOutput.ts",
"retrieved_chunk": " program: AssembledProgram,\n word16Align: boolean\n): Option<Buffer> => {\n // This buffer stores each nibble of the program separately, and we will combine this later into the output buffer\n const threeNibbleBuffer: number[] = new Array(8192 * 3);\n // Fill array with 0xF\n for (let i = 0; i < threeNibbleBuffer.length; i++) {\n threeNibbleBuffer[i] = 0xf;\n }\n for (const instruction of program.matchedInstructions) {",
"score": 11.510135044476938
},
{
"filename": "src/lib/types.ts",
"retrieved_chunk": " opcodeString: string;\n};\nexport interface AssembledProgram {\n currentAddress: number;\n matchedInstructions: Array<\n | ConstantLiteralMatchedInstruction\n | ConstantLabelMatchedInstruction\n | ImmediateMatchedInstruction\n | LabelMatchedInstruction\n | LiteralMatchedInstruction",
"score": 11.123686507102217
},
{
"filename": "src/lib/bass.ts",
"retrieved_chunk": " * @param config The global instruction set config\n * @returns\n */\nexport const parseArchLine = (\n line: string,\n lineNumber: number,\n config: InstructionSet\n) => {\n if (line.length == 0 || line.startsWith(\"//\") || line.startsWith(\"#\")) {\n // Comment. Skip",
"score": 10.915589700659684
},
{
"filename": "src/lib/fs.ts",
"retrieved_chunk": "};\n/**\n * Reads and parses the BASS arch file\n * @param path The path of the arch file\n * @returns The InstructionSet resulting from parsing the arch file\n */\nexport const readArch = async (path: string): Promise<InstructionSet> => {\n const instructionSet: InstructionSet = {\n instructions: [],\n };",
"score": 9.062437570279345
}
] | typescript | instructionSet: InstructionSet,
program: AssembledProgram
) => { |
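
The two assembler commands near the top of this file are matched purely by the regexes in the `commands` table. A small standalone check of how those patterns capture their operands (the example operands are made up; input lines are already lower-cased by readByLines):

// Same patterns as the `commands` table above.
const originRegex = /origin\s+((?:0x)?[a-f0-9]+)/;
const constantRegex = /constant\s+(?:(0x[a-f0-9]+|[0-9]+)|([a-z0-9_]+))/;

console.log(originRegex.exec("origin 0x100")?.[1]); // "0x100" -> parseNumber -> program.currentAddress
console.log(constantRegex.exec("constant 42")?.[1]); // "42" (numeric-literal branch)
console.log(constantRegex.exec("constant reset_vector")?.[2]); // "reset_vector" (label branch)
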
import { propagation } from '@opentelemetry/api'
import { W3CTraceContextPropagator } from '@opentelemetry/core'
import { Resource } from '@opentelemetry/resources'
import { SemanticResourceAttributes } from '@opentelemetry/semantic-conventions'
import {
AlwaysOnSampler,
ParentBasedSampler,
ReadableSpan,
Sampler,
SpanExporter,
TraceIdRatioBasedSampler,
} from '@opentelemetry/sdk-trace-base'
import { Initialiser } from './config.js'
import { OTLPExporter } from './exporter.js'
import { WorkerTracerProvider } from './provider.js'
import { isHeadSampled, isRootErrorSpan, multiTailSampler } from './sampling.js'
import { BatchTraceSpanProcessor } from './spanprocessor.js'
import { Trigger, TraceConfig, ResolvedTraceConfig, ExporterConfig, ParentRatioSamplingConfig } from './types.js'
import { unwrap } from './wrap.js'
import { createFetchHandler, instrumentGlobalFetch } from './instrumentation/fetch.js'
import { instrumentGlobalCache } from './instrumentation/cache.js'
import { createQueueHandler } from './instrumentation/queue.js'
import { DOClass, instrumentDOClass } from './instrumentation/do.js'
type FetchHandler = ExportedHandlerFetchHandler<unknown, unknown>
type QueueHandler = ExportedHandlerQueueHandler
export type ResolveConfigFn = (env: any, trigger: Trigger) => TraceConfig
export type ConfigurationOption = TraceConfig | ResolveConfigFn
export function isRequest(trigger: Trigger): trigger is Request {
return trigger instanceof Request
}
export function isMessageBatch(trigger: Trigger): trigger is MessageBatch {
return !!(trigger as MessageBatch).ackAll
}
export function isAlarm(trigger: Trigger): trigger is 'do-alarm' {
return trigger === 'do-alarm'
}
const createResource = (config: ResolvedTraceConfig): Resource => {
const workerResourceAttrs = {
[SemanticResourceAttributes.CLOUD_PROVIDER]: 'cloudflare',
[SemanticResourceAttributes.CLOUD_PLATFORM]: 'cloudflare.workers',
[SemanticResourceAttributes.CLOUD_REGION]: 'earth',
// [SemanticResourceAttributes.FAAS_NAME]: '//TODO',
// [SemanticResourceAttributes.FAAS_VERSION]: '//TODO',
[SemanticResourceAttributes.FAAS_MAX_MEMORY]: 128,
[SemanticResourceAttributes.TELEMETRY_SDK_LANGUAGE]: 'JavaScript',
[SemanticResourceAttributes.TELEMETRY_SDK_NAME]: '@microlabs/otel-workers-sdk',
}
const serviceResource = new Resource({
[SemanticResourceAttributes.SERVICE_NAME]: config.service.name,
[SemanticResourceAttributes.SERVICE_NAMESPACE]: config.service.namespace,
[SemanticResourceAttributes.SERVICE_VERSION]: config.service.version,
})
const resource = new Resource(workerResourceAttrs)
return resource.merge(serviceResource)
}
function isSpanExporter(exporterConfig: ExporterConfig): exporterConfig is SpanExporter {
return !!(exporterConfig as SpanExporter).export
}
let initialised = false
function init(config: ResolvedTraceConfig): void {
if (!initialised) {
instrumentGlobalCache()
instrumentGlobalFetch()
propagation.setGlobalPropagator(new W3CTraceContextPropagator())
const resource = createResource(config)
const spanProcessor = new BatchTraceSpanProcessor()
const provider = new WorkerTracerProvider(spanProcessor, resource)
provider.register()
initialised = true
}
}
function isSampler(sampler: Sampler | ParentRatioSamplingConfig): sampler is Sampler {
return !!(sampler as Sampler).shouldSample
}
function createSampler(conf: ParentRatioSamplingConfig): Sampler {
const ratioSampler = new TraceIdRatioBasedSampler(conf.ratio)
if (typeof conf.acceptRemote === 'boolean' && !conf.acceptRemote) {
return new ParentBasedSampler({
root: ratioSampler,
remoteParentSampled: ratioSampler,
remoteParentNotSampled: ratioSampler,
})
} else {
return new ParentBasedSampler({ root: ratioSampler })
}
}
function parseConfig(supplied: TraceConfig): ResolvedTraceConfig {
const headSampleConf = supplied.sampling?.headSampler
const headSampler = headSampleConf
? isSampler(headSampleConf)
? headSampleConf
: createSampler(headSampleConf)
: new AlwaysOnSampler()
return {
exporter: isSpanExporter(supplied.exporter) ? supplied.exporter : new OTLPExporter(supplied.exporter),
fetch: {
includeTraceContext: supplied.fetch?.includeTraceContext ?? true,
},
handlers: {
fetch: {
acceptTraceContext: supplied.handlers?.fetch?.acceptTraceContext ?? true,
},
},
postProcessor: supplied.postProcessor || ((spans: ReadableSpan[]) => spans),
sampling: {
headSampler,
tailSampler: supplied.sampling?.tailSampler || multiTailSampler([isHeadSampled, isRootErrorSpan]),
},
service: supplied.service,
}
}
function createInitialiser(config: ConfigurationOption): Initialiser {
if (typeof config === 'function') {
return (env, trigger) => {
const conf = parseConfig(config(env, trigger))
init(conf)
return conf
}
} else {
return () => {
const conf = parseConfig(config)
init(conf)
return conf
}
}
}
export function instrument<E, Q, C>(
handler: ExportedHandler<E, Q, C>,
config: ConfigurationOption
): ExportedHandler<E, Q, C> {
const initialiser = createInitialiser(config)
if (handler.fetch) {
const fetcher = unwrap(handler.fetch) as FetchHandler
handler.fetch = createFetchHandler(fetcher, initialiser)
}
if (handler.queue) {
const queuer = unwrap(handler.queue) as QueueHandler
handler.queue = createQueueHandler(queuer, initialiser)
}
return handler
}
export function instrumentDO(doClass: DOClass, config: ConfigurationOption) {
const initialiser = createInitialiser(config)
| return instrumentDOClass(doClass, initialiser)
} |
export { waitUntilTrace } from './instrumentation/fetch.js'
| src/sdk.ts | evanderkoogh-otel-cf-workers-18fd741 | [
{
"filename": "src/instrumentation/do.ts",
"retrieved_chunk": "\t\t\t}\n\t\t},\n\t}\n\treturn wrap(doObj, objHandler)\n}\nexport function instrumentDOClass(doClass: DOClass, initialiser: Initialiser): DOClass {\n\tconst classHandler: ProxyHandler<DOClass> = {\n\t\tconstruct(target, [orig_state, orig_env]: ConstructorParameters<DOClass>) {\n\t\t\tconst trigger: DOConstructorTrigger = {\n\t\t\t\tid: orig_state.id.toString(),",
"score": 37.732701285611164
},
{
"filename": "src/instrumentation/do.ts",
"retrieved_chunk": "\t\t\treturn instrumentDurableObject(doObj, initialiser, env, state)\n\t\t},\n\t}\n\treturn wrap(doClass, classHandler)\n}",
"score": 27.642791399063515
},
{
"filename": "src/instrumentation/queue.ts",
"retrieved_chunk": "\t\t\tcount.retryRemaining()\n\t\t\tspan.end()\n\t\t\tthrow error\n\t\t}\n\t})\n\treturn promise\n}\nexport function createQueueHandler(queueFn: QueueHandler, initialiser: Initialiser) {\n\tconst queueHandler: ProxyHandler<QueueHandler> = {\n\t\tasync apply(target, _thisArg, argArray: Parameters<QueueHandler>): Promise<void> {",
"score": 26.73562136777117
},
{
"filename": "src/instrumentation/queue.ts",
"retrieved_chunk": "\t}\n\treturn wrap(fn, handler)\n}\nexport function instrumentQueueSender(queue: Queue<unknown>, name: string) {\n\tconst queueHandler: ProxyHandler<Queue<unknown>> = {\n\t\tget: (target, prop) => {\n\t\t\tif (prop === 'send') {\n\t\t\t\tconst sendFn = Reflect.get(target, prop)\n\t\t\t\treturn instrumentQueueSend(sendFn, name)\n\t\t\t} else if (prop === 'sendBatch') {",
"score": 25.157737755969706
},
{
"filename": "src/instrumentation/queue.ts",
"retrieved_chunk": "\t\tapply: (target, thisArg, argArray) => {\n\t\t\treturn tracer.startActiveSpan(`queueSend: ${name}`, async (span) => {\n\t\t\t\tspan.setAttribute('queue.operation', 'send')\n\t\t\t\tawait Reflect.apply(target, unwrap(thisArg), argArray)\n\t\t\t\tspan.end()\n\t\t\t})\n\t\t},\n\t}\n\treturn wrap(fn, handler)\n}",
"score": 24.75285813788915
}
] | typescript | return instrumentDOClass(doClass, initialiser)
} |
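
parseConfig() and createSampler() above accept either a ready-made OpenTelemetry Sampler or a ratio-based head-sampling config, plus an optional tail sampler. A hedged sketch of such a TraceConfig; the import paths and the exporter url shape are assumptions, and the numbers are arbitrary.

import { isHeadSampled, isRootErrorSpan, multiTailSampler } from './sampling.js'
import type { TraceConfig } from './types.js'

// Illustrative values: head-sample 10% of new traces, ignore a remote parent's sampling
// decision, and always keep traces that were head-sampled or whose local root span errored.
const tracedConfig: TraceConfig = {
	exporter: { url: 'https://collector.example.invalid/v1/traces' },
	service: { name: 'sampled-worker' },
	sampling: {
		headSampler: { ratio: 0.1, acceptRemote: false },
		tailSampler: multiTailSampler([isHeadSampled, isRootErrorSpan]),
	},
}
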
import fs, { readFileSync, writeFileSync } from "fs";
import { argv } from "process";
import readline from "readline";
import events from "events";
import { InstructionSet, parseArchLine } from "./lib/bass";
import { parseNumber } from "./lib/util";
import * as path from "path";
import { AssembledProgram } from "./lib/types";
import { commentRegex, labelRegex } from "./lib/regex";
import { outputInstructions } from "./lib/opcodeOutput";
import { log } from "./lib/log";
import { readArch, readByLines } from "./lib/fs";
interface CommandEntry {
regex: RegExp;
action: (
line: { line: string; lineNumber: number },
matches: RegExpExecArray,
program: AssembledProgram
) => void;
}
// The commands supported by the assembler (separate from opcodes)
const commands: CommandEntry[] = [
{
regex: /origin\s+((?:0x)?[a-f0-9]+)/,
action: ({ lineNumber }, [_2, address], program) => {
if (address === undefined) {
log("Could not parse origin", lineNumber);
return;
}
program.currentAddress = parseNumber(address);
},
},
{
regex: /constant\s+(?:(0x[a-f0-9]+|[0-9]+)|([a-z0-9_]+))/,
action: ({ line, lineNumber }, [_, constant, label], program) => {
const address = program.currentAddress;
if (constant !== undefined) {
const value = parseNumber(constant);
if (value > 4095) {
log(
`Constant ${constant} is too large to fit into 12 bits`,
lineNumber
);
return;
}
program.matchedInstructions.push({
type: "constant",
subtype: "literal",
value,
line,
lineNumber,
address,
});
} else if (label !== undefined) {
program.matchedInstructions.push({
type: "constant",
subtype: "label",
label,
line,
lineNumber,
address,
});
} else {
log("Unknown constant error", lineNumber);
return;
}
program.currentAddress += 1;
},
},
];
const parseAsmLine = (
line: string,
lineNumber: number,
instructionSet: InstructionSet,
program: AssembledProgram
) => {
if (line.length == 0 || line.startsWith("//") || line.startsWith(";")) {
// Comment. Skip
return;
}
for (const command of commands) {
const matches = command.regex.exec(line);
if (!!matches && matches.length > 0) {
command.action({ lineNumber, line }, matches, program);
return;
}
}
let hasInstruction = false;
// Match line against all known instructions from the BASS arch
for (const instruction of instructionSet.instructions) {
const matches = instruction.regex.exec(line);
const address = program.currentAddress;
if (!!matches && matches.length > 0) {
if (matches[1] !== undefined) {
// immediate
if (instruction.type !== "immediate") {
log(
"Attempted to match content with non-immediate instruction",
lineNumber
);
return;
}
program.matchedInstructions.push({
type: "immediate",
line,
immediate: parseNumber(matches[1]),
opcodeString: instruction.opcodeString,
bitCount: instruction.immediate.bitCount,
lineNumber,
address,
});
} else if (matches[2] !== undefined) {
// potential label
if (instruction.type !== "immediate") {
log(
"Attempted to match content with non-immediate instruction",
lineNumber
);
return;
}
program.matchedInstructions.push({
type: "label",
line,
label: matches[2],
opcodeString: instruction.opcodeString,
bitCount: instruction.immediate.bitCount,
lineNumber,
address,
});
} else {
// literal only
program.matchedInstructions.push({
type: "literal",
line,
opcodeString: instruction.opcodeString,
lineNumber,
address,
});
}
hasInstruction = true;
program.currentAddress += 1;
break;
}
}
if (hasInstruction && program.unmatchedLabels.length > 0) {
// Add queued labels
for (const label of program.unmatchedLabels) {
const existingLabel = program.matchedLabels[label.label];
if (existingLabel) {
log(
`Label "${label.label}" already exists. Was created on line ${existingLabel.lineNumber}`,
lineNumber
);
return;
}
program.matchedLabels[label.label] = {
lineNumber,
instructionIndex: program.matchedInstructions.length - 1,
address: program.currentAddress - 1,
};
}
// We've processed all labels
program.unmatchedLabels = [];
}
let lineWithoutLabel = line;
const matches = labelRegex.exec(line);
if (!!matches && matches.length > 0 && matches[1]) {
lineWithoutLabel =
lineWithoutLabel.substring(0, matches.index) +
lineWithoutLabel.substring(matches.index + matches[0].length);
const label = matches[1];
const existingLabel = program.matchedLabels[label];
if (existingLabel) {
log(
`Label "${label}" already exists. Was created on line ${existingLabel.lineNumber}`,
lineNumber
);
return;
}
if (hasInstruction) {
// Instruction on this line, pair them up
program.matchedLabels[label] = {
lineNumber,
instructionIndex: program.matchedInstructions.length - 1,
address: program.currentAddress - 1,
};
} else {
// Will pair with some future instruction. Queue it
program.unmatchedLabels.push({
label,
lineNumber,
});
}
}
| lineWithoutLabel = lineWithoutLabel.replace(commentRegex, "").trim(); |
if (!hasInstruction && lineWithoutLabel.length > 0) {
log(`Unknown instruction "${lineWithoutLabel}"`, lineNumber);
}
};
if (argv.length != 4 && argv.length != 5) {
console.log(`Received ${argv.length - 2} arguments. Expected 2-3\n`);
console.log(
"Usage: node assembler.js [input.asm] [output.bin] {true|false: 12 bit output}"
);
process.exit(1);
}
const archPath = path.join(__dirname, "../bass/6200.arch");
const inputFile = argv[2] as string;
const outputFile = argv[3] as string;
// Passing "true" as the third CLI argument selects packed 12-bit output; anything else keeps 16-bit word alignment.
const word16Align = argv[4] !== "true";
const build = async () => {
const program: AssembledProgram = {
currentAddress: 0,
matchedInstructions: [],
matchedLabels: {},
unmatchedLabels: [],
};
const instructionSet = await readArch(archPath);
await readByLines(inputFile, (line, lineNumber) =>
parseAsmLine(line, lineNumber, instructionSet, program)
);
const outputBuffer = outputInstructions(program, word16Align);
if (outputBuffer.type === "some") {
writeFileSync(outputFile, outputBuffer.value);
} else {
console.log("Could not generate output binary");
}
};
build();
| src/assembler.ts | agg23-tamagotchi-disassembled-421eacb | [
{
"filename": "src/lib/opcodeOutput.ts",
"retrieved_chunk": " } else {\n // Label\n const label = program.matchedLabels[instruction.label];\n if (!label) {\n log(`Unknown label ${instruction.label}`, instruction.lineNumber);\n return { type: \"none\" };\n }\n console.log(`${label.address.toString(16)}`);\n opcode = label.address;\n }",
"score": 11.79382408393788
},
{
"filename": "src/lib/opcodeOutput.ts",
"retrieved_chunk": " instruction.immediate\n );\n break;\n }\n case \"label\": {\n const label = program.matchedLabels[instruction.label];\n if (!label) {\n log(`Unknown label ${instruction.label}`, instruction.lineNumber);\n return { type: \"none\" };\n }",
"score": 10.944112765680837
},
{
"filename": "src/lib/types.ts",
"retrieved_chunk": " >;\n matchedLabels: {\n [name: string]: {\n lineNumber: number;\n instructionIndex: number;\n address: number;\n };\n };\n unmatchedLabels: Array<{\n label: string;",
"score": 10.428635822297167
},
{
"filename": "src/lib/disassembly.ts",
"retrieved_chunk": " const existingLabel = unsetLabels[pc];\n if (existingLabel) {\n existingLabel.push(disassembledInstruction);\n } else {\n unsetLabels[pc] = [disassembledInstruction];\n }\n }\n disassembledInstructions.push(disassembledInstruction);\n }\n // Build label names",
"score": 8.891558803708175
},
{
"filename": "src/lib/bass.ts",
"retrieved_chunk": " });\n } else {\n // This is a literal\n const sortableOpcode = buildSortableOpcode(opcodeString, 0);\n config.instructions.push({\n type: \"literal\",\n regex: cleanAndFinishInstructionRegex(originalInstruction),\n opcodeString,\n sortableOpcode,\n originalInstruction: originalInstruction.trim(),",
"score": 7.389472031065944
}
] | typescript | lineWithoutLabel = lineWithoutLabel.replace(commentRegex, "").trim(); |
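
The label bookkeeping in parseAsmLine is easiest to follow as a concrete trace. The sketch below hand-builds the AssembledProgram state for a label that sits alone on line 3 and is then attached to the instruction on line 4; the mnemonic and opcodeString are placeholders, since real opcodes come from the BASS arch file.

import { AssembledProgram } from "./lib/types";

const program: AssembledProgram = {
  currentAddress: 0,
  matchedInstructions: [],
  matchedLabels: {},
  unmatchedLabels: [],
};

// Line 3 contains only "loop:" -> no instruction yet, so the label is queued.
program.unmatchedLabels.push({ label: "loop", lineNumber: 3 });

// Line 4 matches an instruction -> it is recorded, the address advances, and the
// queued label is paired with that instruction (the lineNumber stored for the label
// is the instruction's line, exactly as in the code above).
program.matchedInstructions.push({
  type: "literal",
  line: "nop", // placeholder mnemonic
  opcodeString: "000000000000", // placeholder
  lineNumber: 4,
  address: 0,
});
program.currentAddress += 1;
program.matchedLabels["loop"] = {
  lineNumber: 4,
  instructionIndex: program.matchedInstructions.length - 1,
  address: program.currentAddress - 1,
};
program.unmatchedLabels = [];
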
import { Context, Span } from '@opentelemetry/api'
import { ReadableSpan, SpanExporter, SpanProcessor } from '@opentelemetry/sdk-trace-base'
import { ExportResult, ExportResultCode } from '@opentelemetry/core'
import { Action, State, stateMachine } from 'ts-checked-fsm'
import { getActiveConfig } from './config.js'
import { TailSampleFn } from './sampling.js'
import { PostProcessorFn } from './types.js'
type CompletedTrace = {
traceId: string
localRootSpan: ReadableSpan
completedSpans: ReadableSpan[]
}
type InProgressTrace = {
inProgressSpanIds: Set<string>
} & CompletedTrace
type InitialState = State<'not_started'>
type InProgressTraceState = State<'in_progress', InProgressTrace>
type TraceCompleteState = State<'trace_complete', CompletedTrace>
type ExportingState = State<'exporting', { promise: Promise<ExportResult> }>
type DoneState = State<'done'>
type StartExportArguments = {
exporter: SpanExporter
tailSampler: TailSampleFn
postProcessor: PostProcessorFn
}
type StartSpanAction = Action<'startSpan', { span: Span }>
type EndSpanAction = Action<'endSpan', { span: ReadableSpan }>
type StartExportAction = Action<'startExport', { args: StartExportArguments }>
function newTrace(currentState: InitialState, { span }: StartSpanAction): InProgressTraceState {
const spanId = span.spanContext().spanId
return {
...currentState,
stateName: 'in_progress',
traceId: span.spanContext().traceId,
localRootSpan: span as unknown as ReadableSpan,
completedSpans: [] as ReadableSpan[],
inProgressSpanIds: new Set([spanId]),
} as const
}
function newSpan(currentState: InProgressTraceState, { span }: StartSpanAction): InProgressTraceState {
const spanId = span.spanContext().spanId
currentState.inProgressSpanIds.add(spanId)
return { ...currentState }
}
function endSpan(
currentState: InProgressTraceState,
{ span }: EndSpanAction
): InProgressTraceState | TraceCompleteState {
currentState.completedSpans.push(span)
currentState.inProgressSpanIds.delete(span.spanContext().spanId)
if (currentState.inProgressSpanIds.size === 0) {
return {
stateName: 'trace_complete',
traceId: currentState.traceId,
localRootSpan: currentState.localRootSpan,
completedSpans: currentState.completedSpans,
} as const
} else {
return { ...currentState }
}
}
function startExport(currentState: TraceCompleteState, { args }: StartExportAction): ExportingState | DoneState {
const { exporter, tailSampler, postProcessor } = args
const { traceId, localRootSpan, completedSpans: spans } = currentState
const shouldExport | = tailSampler({ traceId, localRootSpan, spans })
if (shouldExport) { |
const exportSpans = postProcessor(spans)
const promise = new Promise<ExportResult>((resolve) => {
exporter.export(exportSpans, resolve)
})
return { stateName: 'exporting', promise }
} else {
return { stateName: 'done' }
}
}
const { nextState } = stateMachine()
.state('not_started')
.state<'in_progress', InProgressTraceState>('in_progress')
.state<'trace_complete', TraceCompleteState>('trace_complete')
.state<'exporting', ExportingState>('exporting')
.state('done')
.transition('not_started', 'in_progress')
.transition('in_progress', 'in_progress')
.transition('in_progress', 'trace_complete')
.transition('trace_complete', 'exporting')
.transition('trace_complete', 'done')
.transition('exporting', 'done')
.action<'startSpan', StartSpanAction>('startSpan')
.action<'endSpan', EndSpanAction>('endSpan')
.action<'startExport', StartExportAction>('startExport')
.action('exportDone')
.actionHandler('not_started', 'startSpan', newTrace)
.actionHandler('in_progress', 'startSpan', newSpan)
.actionHandler('in_progress', 'endSpan', endSpan)
.actionHandler('trace_complete', 'startExport', startExport)
.actionHandler('exporting', 'exportDone', (_c, _a) => {
return { stateName: 'done' } as const
})
.done()
type AnyTraceState = Parameters<typeof nextState>[0]
type AnyTraceAction = Parameters<typeof nextState>[1]
export class BatchTraceSpanProcessor implements SpanProcessor {
private traces: Map<string, AnyTraceState> = new Map()
private inprogressExports: Map<string, Promise<ExportResult>> = new Map()
private action(traceId: string, action: AnyTraceAction): AnyTraceState {
const state = this.traces.get(traceId) || { stateName: 'not_started' }
const newState = nextState(state, action)
if (newState.stateName === 'done') {
this.traces.delete(traceId)
} else {
this.traces.set(traceId, newState)
}
return newState
}
private export(traceId: string) {
const { exporter, sampling, postProcessor } = getActiveConfig()
const exportArgs = { exporter, tailSampler: sampling.tailSampler, postProcessor }
const newState = this.action(traceId, { actionName: 'startExport', args: exportArgs })
if (newState.stateName === 'exporting') {
const promise = newState.promise
this.inprogressExports.set(traceId, promise)
promise.then((result) => {
if (result.code === ExportResultCode.FAILED) {
console.log('Error sending spans to exporter:', result.error)
}
this.action(traceId, { actionName: 'exportDone' })
this.inprogressExports.delete(traceId)
})
}
}
onStart(span: Span, _parentContext: Context): void {
const traceId = span.spanContext().traceId
this.action(traceId, { actionName: 'startSpan', span })
}
onEnd(span: ReadableSpan): void {
const traceId = span.spanContext().traceId
const state = this.action(traceId, { actionName: 'endSpan', span })
if (state.stateName === 'trace_complete') {
this.export(traceId)
}
}
async forceFlush(): Promise<void> {
await Promise.allSettled(this.inprogressExports.values())
}
async shutdown(): Promise<void> {}
}
| src/spanprocessor.ts | evanderkoogh-otel-cf-workers-18fd741 | [
{
"filename": "src/sdk.ts",
"retrieved_chunk": "\t\t\t\tacceptTraceContext: supplied.handlers?.fetch?.acceptTraceContext ?? true,\n\t\t\t},\n\t\t},\n\t\tpostProcessor: supplied.postProcessor || ((spans: ReadableSpan[]) => spans),\n\t\tsampling: {\n\t\t\theadSampler,\n\t\t\ttailSampler: supplied.sampling?.tailSampler || multiTailSampler([isHeadSampled, isRootErrorSpan]),\n\t\t},\n\t\tservice: supplied.service,\n\t}",
"score": 31.528177945663376
},
{
"filename": "src/sampling.ts",
"retrieved_chunk": "import { TraceFlags, SpanStatusCode } from '@opentelemetry/api'\nimport { ReadableSpan } from '@opentelemetry/sdk-trace-base'\nexport interface LocalTrace {\n\treadonly traceId: string\n\treadonly localRootSpan: ReadableSpan\n\treadonly spans: ReadableSpan[]\n}\nexport type TailSampleFn = (traceInfo: LocalTrace) => boolean\nexport function multiTailSampler(samplers: TailSampleFn[]): TailSampleFn {\n\treturn (traceInfo) => {",
"score": 23.123071940632723
},
{
"filename": "src/sampling.ts",
"retrieved_chunk": "\t\treturn samplers.reduce((result, sampler) => result || sampler(traceInfo), false)\n\t}\n}\nexport const isHeadSampled: TailSampleFn = (traceInfo) => {\n\tconst localRootSpan = traceInfo.localRootSpan as unknown as ReadableSpan\n\treturn localRootSpan.spanContext().traceFlags === TraceFlags.SAMPLED\n}\nexport const isRootErrorSpan: TailSampleFn = (traceInfo) => {\n\tconst localRootSpan = traceInfo.localRootSpan as unknown as ReadableSpan\n\treturn localRootSpan.status.code === SpanStatusCode.ERROR",
"score": 23.000609560352018
},
{
"filename": "src/types.ts",
"retrieved_chunk": "\theadSampler?: HS\n\ttailSampler?: TailSampleFn\n}\nexport interface TraceConfig<EC extends ExporterConfig = ExporterConfig> {\n\texporter: EC\n\thandlers?: HandlerConfig\n\tfetch?: FetcherConfig\n\tpostProcessor?: PostProcessorFn\n\tsampling?: SamplingConfig\n\tservice: ServiceConfig",
"score": 18.613930555935475
},
{
"filename": "src/instrumentation/queue.ts",
"retrieved_chunk": "\tconst tracer = trace.getTracer('queueHandler')\n\tconst options: SpanOptions = {\n\t\tattributes: {\n\t\t\t'queue.name': batch.queue,\n\t\t},\n\t\tkind: SpanKind.CONSUMER,\n\t}\n\tconst promise = tracer.startActiveSpan(`queueHandler:${batch.queue}`, options, async (span) => {\n\t\tconst traceId = span.spanContext().traceId\n\t\tapi_context.active().setValue(traceIdSymbol, traceId)",
"score": 16.997421858331453
}
] | typescript | = tailSampler({ traceId, localRootSpan, spans })
if (shouldExport) { |
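
startExport above hands a finished local trace to the configured tailSampler and then runs the postProcessor before exporting. A hedged sketch of what user-supplied implementations of those two hooks can look like; the 50ms threshold and the attribute name are arbitrary examples.

import { ReadableSpan } from '@opentelemetry/sdk-trace-base'
import { TailSampleFn } from './sampling.js'
import { PostProcessorFn } from './types.js'

// Keep only traces whose local root span took longer than 50ms (duration is an HrTime tuple).
export const slowTraceSampler: TailSampleFn = ({ localRootSpan }) => {
	const [seconds, nanos] = localRootSpan.duration
	return seconds > 0 || nanos > 50_000_000
}

// Scrub a sensitive attribute from every span before it reaches the exporter.
export const scrubAuthHeader: PostProcessorFn = (spans: ReadableSpan[]) =>
	spans.map((span) => {
		delete span.attributes['http.request.header.authorization']
		return span
	})
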
import fs, { readFileSync, writeFileSync } from "fs";
import { argv } from "process";
import readline from "readline";
import events from "events";
import { InstructionSet, parseArchLine } from "./lib/bass";
import { parseNumber } from "./lib/util";
import * as path from "path";
import { AssembledProgram } from "./lib/types";
import { commentRegex, labelRegex } from "./lib/regex";
import { outputInstructions } from "./lib/opcodeOutput";
import { log } from "./lib/log";
import { readArch, readByLines } from "./lib/fs";
interface CommandEntry {
regex: RegExp;
action: (
line: { line: string; lineNumber: number },
matches: RegExpExecArray,
program: AssembledProgram
) => void;
}
// The commands supported by the assembler (separate from opcodes)
const commands: CommandEntry[] = [
{
regex: /origin\s+((?:0x)?[a-f0-9]+)/,
action: ({ lineNumber }, [_2, address], program) => {
if (address === undefined) {
log("Could not parse origin", lineNumber);
return;
}
program.currentAddress = parseNumber(address);
},
},
{
regex: /constant\s+(?:(0x[a-f0-9]+|[0-9]+)|([a-z0-9_]+))/,
action: ({ line, lineNumber }, [_, constant, label], program) => {
const address = program.currentAddress;
if (constant !== undefined) {
const value = parseNumber(constant);
if (value > 4095) {
log(
`Constant ${constant} is too large to fit into 12 bits`,
lineNumber
);
return;
}
program.matchedInstructions.push({
type: "constant",
subtype: "literal",
value,
line,
lineNumber,
address,
});
} else if (label !== undefined) {
program.matchedInstructions.push({
type: "constant",
subtype: "label",
label,
line,
lineNumber,
address,
});
} else {
log("Unknown constant error", lineNumber);
return;
}
program.currentAddress += 1;
},
},
];
const parseAsmLine = (
line: string,
lineNumber: number,
instructionSet: InstructionSet,
program: AssembledProgram
) => {
if (line.length == 0 || line.startsWith("//") || line.startsWith(";")) {
// Comment. Skip
return;
}
for (const command of commands) {
const matches = command.regex.exec(line);
if (!!matches && matches.length > 0) {
command.action({ lineNumber, line }, matches, program);
return;
}
}
let hasInstruction = false;
// Match line against all known instructions from the BASS arch
for (const instruction of instructionSet.instructions) {
const matches = instruction.regex.exec(line);
const address = program.currentAddress;
if (!!matches && matches.length > 0) {
if (matches[1] !== undefined) {
// immediate
if (instruction.type !== "immediate") {
log(
"Attempted to match content with non-immediate instruction",
lineNumber
);
return;
}
program.matchedInstructions.push({
type: "immediate",
line,
immediate: parseNumber(matches[1]),
opcodeString: instruction.opcodeString,
bitCount: instruction.immediate.bitCount,
lineNumber,
address,
});
} else if (matches[2] !== undefined) {
// potential label
if (instruction.type !== "immediate") {
log(
"Attempted to match content with non-immediate instruction",
lineNumber
);
return;
}
program.matchedInstructions.push({
type: "label",
line,
label: matches[2],
opcodeString: instruction.opcodeString,
bitCount: instruction.immediate.bitCount,
lineNumber,
address,
});
} else {
// literal only
program.matchedInstructions.push({
type: "literal",
line,
opcodeString: instruction.opcodeString,
lineNumber,
address,
});
}
hasInstruction = true;
program.currentAddress += 1;
break;
}
}
if (hasInstruction && program.unmatchedLabels.length > 0) {
// Add queued labels
for (const label of program.unmatchedLabels) {
const existingLabel = program.matchedLabels[label.label];
if (existingLabel) {
log(
`Label "${label.label}" already exists. Was created on line ${existingLabel.lineNumber}`,
lineNumber
);
return;
}
program.matchedLabels[label.label] = {
lineNumber,
instructionIndex: program.matchedInstructions.length - 1,
address: program.currentAddress - 1,
};
}
// We've processed all labels
program.unmatchedLabels = [];
}
let lineWithoutLabel = line;
const matches = labelRegex.exec(line);
if (!!matches && matches.length > 0 && matches[1]) {
lineWithoutLabel =
lineWithoutLabel.substring(0, matches.index) +
lineWithoutLabel.substring(matches.index + matches[0].length);
const label = matches[1];
const existingLabel = program.matchedLabels[label];
if (existingLabel) {
log(
`Label "${label}" already exists. Was created on line ${existingLabel.lineNumber}`,
lineNumber
);
return;
}
if (hasInstruction) {
// Instruction on this line, pair them up
program.matchedLabels[label] = {
lineNumber,
instructionIndex: program.matchedInstructions.length - 1,
address: program.currentAddress - 1,
};
} else {
// Will pair with some future instruction. Queue it
program.unmatchedLabels.push({
label,
lineNumber,
});
}
}
lineWithoutLabel = lineWithoutLabel.replace(commentRegex, "").trim();
if (!hasInstruction && lineWithoutLabel.length > 0) {
log(`Unknown instruction "${lineWithoutLabel}"`, lineNumber);
}
};
if (argv.length != 4 && argv.length != 5) {
console.log(`Received ${argv.length - 2} arguments. Expected 2-3\n`);
console.log(
"Usage: node assembler.js [input.asm] [output.bin] {true|false: 12 bit output}"
);
process.exit(1);
}
const archPath = path.join(__dirname, "../bass/6200.arch");
const inputFile = argv[2] as string;
const outputFile = argv[3] as string;
// Passing "true" as the third CLI argument selects packed 12-bit output; anything else keeps 16-bit word alignment.
const word16Align = argv[4] !== "true";
const build = async () => {
const program: AssembledProgram = {
currentAddress: 0,
matchedInstructions: [],
matchedLabels: {},
unmatchedLabels: [],
};
const instructionSet = await readArch(archPath);
| await readByLines(inputFile, (line, lineNumber) =>
parseAsmLine(line, lineNumber, instructionSet, program)
); |
const outputBuffer = outputInstructions(program, word16Align);
if (outputBuffer.type === "some") {
writeFileSync(outputFile, outputBuffer.value);
} else {
console.log("Could not generate output binary");
}
};
build();
| src/assembler.ts | agg23-tamagotchi-disassembled-421eacb | [
{
"filename": "src/lib/fs.ts",
"retrieved_chunk": " await readByLines(path, (line, lineNumber) =>\n parseArchLine(line, lineNumber, instructionSet)\n );\n return instructionSet;\n};",
"score": 37.43083165632357
},
{
"filename": "src/disassembler.ts",
"retrieved_chunk": "const archPath = path.join(__dirname, \"../bass/6200.arch\");\nconst inputFile = argv[2] as string;\nconst outputFile = argv[3] as string;\nconst build = async () => {\n const instructionSet = await readArch(archPath);\n const sortedInstructions = instructionSet.instructions.sort(\n (a, b) => a.sortableOpcode - b.sortableOpcode\n );\n const buffer = readFileSync(inputFile);\n const outputString = parseBinaryBuffer(buffer, sortedInstructions);",
"score": 26.54730879419354
},
{
"filename": "src/lib/fs.ts",
"retrieved_chunk": " path: string,\n onLine: (line: string, lineNumber: number) => void\n) => {\n const rl = readline.createInterface({\n input: fs.createReadStream(path),\n crlfDelay: Infinity,\n });\n let lineNumber = 0;\n rl.on(\"line\", (line) => onLine(line.toLowerCase().trim(), ++lineNumber));\n await events.once(rl, \"close\");",
"score": 20.17793402022554
},
{
"filename": "src/lib/opcodeOutput.ts",
"retrieved_chunk": " instruction.immediate\n );\n break;\n }\n case \"label\": {\n const label = program.matchedLabels[instruction.label];\n if (!label) {\n log(`Unknown label ${instruction.label}`, instruction.lineNumber);\n return { type: \"none\" };\n }",
"score": 15.122474921581205
},
{
"filename": "src/lib/opcodeOutput.ts",
"retrieved_chunk": " program: AssembledProgram,\n word16Align: boolean\n): Option<Buffer> => {\n // This buffer stores each nibble of the program separately, and we will combine this later into the output buffer\n const threeNibbleBuffer: number[] = new Array(8192 * 3);\n // Fill array with 0xF\n for (let i = 0; i < threeNibbleBuffer.length; i++) {\n threeNibbleBuffer[i] = 0xf;\n }\n for (const instruction of program.matchedInstructions) {",
"score": 14.825721922631775
}
] | typescript | await readByLines(inputFile, (line, lineNumber) =>
parseAsmLine(line, lineNumber, instructionSet, program)
); |
import {
Tracer,
TraceFlags,
Span,
SpanKind,
SpanOptions,
Context,
context as api_context,
trace,
} from '@opentelemetry/api'
import { sanitizeAttributes } from '@opentelemetry/core'
import { Resource } from '@opentelemetry/resources'
import { SpanProcessor, RandomIdGenerator, ReadableSpan, SamplingDecision } from '@opentelemetry/sdk-trace-base'
import { SpanImpl } from './span.js'
import { getActiveConfig } from './config.js'
export class WorkerTracer implements Tracer {
private readonly _spanProcessor: SpanProcessor
private readonly resource: Resource
private readonly idGenerator: RandomIdGenerator = new RandomIdGenerator()
constructor(spanProcessor: SpanProcessor, resource: Resource) {
this._spanProcessor = spanProcessor
this.resource = resource
}
get spanProcessor() {
return this._spanProcessor
}
addToResource(extra: Resource) {
this.resource.merge(extra)
}
startSpan(name: string, options: SpanOptions = {}, context = api_context.active()): Span {
if (options.root) {
context = trace.deleteSpan(context)
}
const parentSpan = trace.getSpan(context)
const parentSpanContext = parentSpan?.spanContext()
const hasParentContext = parentSpanContext && trace.isSpanContextValid(parentSpanContext)
const traceId = hasParentContext ? parentSpanContext.traceId : this.idGenerator.generateTraceId()
const spanKind = options.kind || SpanKind.INTERNAL
const sanitisedAttrs = sanitizeAttributes(options.attributes)
const sampler = getActiveConfig().sampling.headSampler
const samplingDecision = sampler.shouldSample(context, traceId, name, spanKind, sanitisedAttrs, [])
const { decision, traceState, attributes: attrs } = samplingDecision
const attributes = Object.assign({}, sanitisedAttrs, attrs)
const spanId = this.idGenerator.generateSpanId()
const parentSpanId = hasParentContext ? parentSpanContext.spanId : undefined
const traceFlags = decision === SamplingDecision.RECORD_AND_SAMPLED ? TraceFlags.SAMPLED : TraceFlags.NONE
const spanContext = { traceId, spanId, traceFlags, traceState }
| const span = new SpanImpl({ |
attributes,
name,
onEnd: (span) => {
this.spanProcessor.onEnd(span as unknown as ReadableSpan)
},
resource: this.resource,
spanContext,
parentSpanId,
spanKind,
startTime: options.startTime,
})
	//Do not get me started on the idiosyncrasies of the Otel JS libraries.
//@ts-ignore
this.spanProcessor.onStart(span, context)
return span
}
startActiveSpan<F extends (span: Span) => ReturnType<F>>(name: string, fn: F): ReturnType<F>
startActiveSpan<F extends (span: Span) => ReturnType<F>>(name: string, options: SpanOptions, fn: F): ReturnType<F>
startActiveSpan<F extends (span: Span) => ReturnType<F>>(
name: string,
options: SpanOptions,
context: Context,
fn: F
): ReturnType<F>
startActiveSpan<F extends (span: Span) => ReturnType<F>>(name: string, ...args: unknown[]): ReturnType<F> {
const options = args.length > 1 ? (args[0] as SpanOptions) : undefined
const parentContext = args.length > 2 ? (args[1] as Context) : api_context.active()
const fn = args[args.length - 1] as F
const span = this.startSpan(name, options, parentContext)
const contextWithSpanSet = trace.setSpan(parentContext, span)
return api_context.with(contextWithSpanSet, fn, undefined, span)
}
}
| src/tracer.ts | evanderkoogh-otel-cf-workers-18fd741 | [
{
"filename": "src/sampling.ts",
"retrieved_chunk": "\t\treturn samplers.reduce((result, sampler) => result || sampler(traceInfo), false)\n\t}\n}\nexport const isHeadSampled: TailSampleFn = (traceInfo) => {\n\tconst localRootSpan = traceInfo.localRootSpan as unknown as ReadableSpan\n\treturn localRootSpan.spanContext().traceFlags === TraceFlags.SAMPLED\n}\nexport const isRootErrorSpan: TailSampleFn = (traceInfo) => {\n\tconst localRootSpan = traceInfo.localRootSpan as unknown as ReadableSpan\n\treturn localRootSpan.status.code === SpanStatusCode.ERROR",
"score": 50.224006273722004
},
{
"filename": "src/instrumentation/queue.ts",
"retrieved_chunk": "\tconst tracer = trace.getTracer('queueHandler')\n\tconst options: SpanOptions = {\n\t\tattributes: {\n\t\t\t'queue.name': batch.queue,\n\t\t},\n\t\tkind: SpanKind.CONSUMER,\n\t}\n\tconst promise = tracer.startActiveSpan(`queueHandler:${batch.queue}`, options, async (span) => {\n\t\tconst traceId = span.spanContext().traceId\n\t\tapi_context.active().setValue(traceIdSymbol, traceId)",
"score": 43.95112400312183
},
{
"filename": "src/spanprocessor.ts",
"retrieved_chunk": "\tconst spanId = span.spanContext().spanId\n\tcurrentState.inProgressSpanIds.add(spanId)\n\treturn { ...currentState }\n}\nfunction endSpan(\n\tcurrentState: InProgressTraceState,\n\t{ span }: EndSpanAction\n): InProgressTraceState | TraceCompleteState {\n\tcurrentState.completedSpans.push(span)\n\tcurrentState.inProgressSpanIds.delete(span.spanContext().spanId)",
"score": 40.72707848222585
},
{
"filename": "src/spanprocessor.ts",
"retrieved_chunk": "\treturn {\n\t\t...currentState,\n\t\tstateName: 'in_progress',\n\t\ttraceId: span.spanContext().traceId,\n\t\tlocalRootSpan: span as unknown as ReadableSpan,\n\t\tcompletedSpans: [] as ReadableSpan[],\n\t\tinProgressSpanIds: new Set([spanId]),\n\t} as const\n}\nfunction newSpan(currentState: InProgressTraceState, { span }: StartSpanAction): InProgressTraceState {",
"score": 39.395993277816046
},
{
"filename": "src/instrumentation/fetch.ts",
"retrieved_chunk": "\t\t\tconst options: SpanOptions = { kind: SpanKind.CLIENT, attributes: attrs }\n\t\t\tconst host = new URL(request.url).host\n\t\t\tconst spanName = typeof attrs?.['name'] === 'string' ? attrs?.['name'] : `fetch: ${host}`\n\t\t\tconst promise = tracer.startActiveSpan(spanName, options, async (span) => {\n\t\t\t\tconst includeTraceContext =\n\t\t\t\t\ttypeof config.includeTraceContext === 'function'\n\t\t\t\t\t\t? config.includeTraceContext(request)\n\t\t\t\t\t\t: config.includeTraceContext\n\t\t\t\tif (includeTraceContext ?? true) {\n\t\t\t\t\tpropagation.inject(api_context.active(), request.headers, {",
"score": 37.10311451369575
}
] | typescript | const span = new SpanImpl({ |
import fs, { readFileSync, writeFileSync } from "fs";
import { argv } from "process";
import readline from "readline";
import events from "events";
import { InstructionSet, parseArchLine } from "./lib/bass";
import { parseNumber } from "./lib/util";
import * as path from "path";
import { AssembledProgram } from "./lib/types";
import { commentRegex, labelRegex } from "./lib/regex";
import { outputInstructions } from "./lib/opcodeOutput";
import { log } from "./lib/log";
import { readArch, readByLines } from "./lib/fs";
interface CommandEntry {
regex: RegExp;
action: (
line: { line: string; lineNumber: number },
matches: RegExpExecArray,
program: AssembledProgram
) => void;
}
// The commands supported by the assembler (separate from opcodes)
const commands: CommandEntry[] = [
{
regex: /origin\s+((?:0x)?[a-f0-9]+)/,
action: ({ lineNumber }, [_2, address], program) => {
if (address === undefined) {
log("Could not parse origin", lineNumber);
return;
}
program.currentAddress = parseNumber(address);
},
},
{
regex: /constant\s+(?:(0x[a-f0-9]+|[0-9]+)|([a-z0-9_]+))/,
action: ({ line, lineNumber }, [_, constant, label], program) => {
const address = program.currentAddress;
if (constant !== undefined) {
const value = parseNumber(constant);
if (value > 4095) {
log(
`Constant ${constant} is too large to fit into 12 bits`,
lineNumber
);
return;
}
program.matchedInstructions.push({
type: "constant",
subtype: "literal",
value,
line,
lineNumber,
address,
});
} else if (label !== undefined) {
program.matchedInstructions.push({
type: "constant",
subtype: "label",
label,
line,
lineNumber,
address,
});
} else {
log("Unknown constant error", lineNumber);
return;
}
program.currentAddress += 1;
},
},
];
const parseAsmLine = (
line: string,
lineNumber: number,
instructionSet: InstructionSet,
program: AssembledProgram
) => {
if (line.length == 0 || line.startsWith("//") || line.startsWith(";")) {
// Comment. Skip
return;
}
for (const command of commands) {
const matches = command.regex.exec(line);
if (!!matches && matches.length > 0) {
command.action({ lineNumber, line }, matches, program);
return;
}
}
let hasInstruction = false;
// Match line against all known instructions from the BASS arch
for (const instruction of instructionSet.instructions) {
const matches = instruction.regex.exec(line);
const address = program.currentAddress;
if (!!matches && matches.length > 0) {
if (matches[1] !== undefined) {
// immediate
if (instruction.type !== "immediate") {
log(
"Attempted to match content with non-immediate instruction",
lineNumber
);
return;
}
program.matchedInstructions.push({
type: "immediate",
line,
immediate: parseNumber(matches[1]),
opcodeString: instruction.opcodeString,
bitCount: instruction.immediate.bitCount,
lineNumber,
address,
});
} else if (matches[2] !== undefined) {
// potential label
if (instruction.type !== "immediate") {
log(
"Attempted to match content with non-immediate instruction",
lineNumber
);
return;
}
program.matchedInstructions.push({
type: "label",
line,
label: matches[2],
opcodeString: instruction.opcodeString,
bitCount: instruction.immediate.bitCount,
lineNumber,
address,
});
} else {
// literal only
program.matchedInstructions.push({
type: "literal",
line,
opcodeString: instruction.opcodeString,
lineNumber,
address,
});
}
hasInstruction = true;
program.currentAddress += 1;
break;
}
}
if (hasInstruction && program.unmatchedLabels.length > 0) {
// Add queued labels
for (const label of program.unmatchedLabels) {
const existingLabel = program.matchedLabels[label.label];
if (existingLabel) {
log(
`Label "${label.label}" already exists. Was created on line ${existingLabel.lineNumber}`,
lineNumber
);
return;
}
program.matchedLabels[label.label] = {
lineNumber,
instructionIndex: program.matchedInstructions.length - 1,
address: program.currentAddress - 1,
};
}
// We've processed all labels
program.unmatchedLabels = [];
}
let lineWithoutLabel = line;
const | matches = labelRegex.exec(line); |
if (!!matches && matches.length > 0 && matches[1]) {
lineWithoutLabel =
lineWithoutLabel.substring(0, matches.index) +
lineWithoutLabel.substring(matches.index + matches[0].length);
const label = matches[1];
const existingLabel = program.matchedLabels[label];
if (existingLabel) {
log(
`Label "${label}" already exists. Was created on line ${existingLabel.lineNumber}`,
lineNumber
);
return;
}
if (hasInstruction) {
// Instruction on this line, pair them up
program.matchedLabels[label] = {
lineNumber,
instructionIndex: program.matchedInstructions.length - 1,
address: program.currentAddress - 1,
};
} else {
// Will pair with some future instruction. Queue it
program.unmatchedLabels.push({
label,
lineNumber,
});
}
}
lineWithoutLabel = lineWithoutLabel.replace(commentRegex, "").trim();
if (!hasInstruction && lineWithoutLabel.length > 0) {
log(`Unknown instruction "${lineWithoutLabel}"`, lineNumber);
}
};
if (argv.length != 4 && argv.length != 5) {
console.log(`Received ${argv.length - 2} arguments. Expected 2-3\n`);
console.log(
"Usage: node assembler.js [input.asm] [output.bin] {true|false: 12 bit output}"
);
process.exit(1);
}
const archPath = path.join(__dirname, "../bass/6200.arch");
const inputFile = argv[2] as string;
const outputFile = argv[3] as string;
const word16Align = argv[4] !== "true";
const build = async () => {
const program: AssembledProgram = {
currentAddress: 0,
matchedInstructions: [],
matchedLabels: {},
unmatchedLabels: [],
};
const instructionSet = await readArch(archPath);
await readByLines(inputFile, (line, lineNumber) =>
parseAsmLine(line, lineNumber, instructionSet, program)
);
const outputBuffer = outputInstructions(program, word16Align);
if (outputBuffer.type === "some") {
writeFileSync(outputFile, outputBuffer.value);
} else {
console.log("Could not generate output binary");
}
};
build();
| src/assembler.ts | agg23-tamagotchi-disassembled-421eacb | [
{
"filename": "src/lib/opcodeOutput.ts",
"retrieved_chunk": " program: AssembledProgram,\n word16Align: boolean\n): Option<Buffer> => {\n // This buffer stores each nibble of the program separately, and we will combine this later into the output buffer\n const threeNibbleBuffer: number[] = new Array(8192 * 3);\n // Fill array with 0xF\n for (let i = 0; i < threeNibbleBuffer.length; i++) {\n threeNibbleBuffer[i] = 0xf;\n }\n for (const instruction of program.matchedInstructions) {",
"score": 16.846957557577085
},
{
"filename": "src/lib/types.ts",
"retrieved_chunk": " >;\n matchedLabels: {\n [name: string]: {\n lineNumber: number;\n instructionIndex: number;\n address: number;\n };\n };\n unmatchedLabels: Array<{\n label: string;",
"score": 12.247932324876931
},
{
"filename": "src/lib/opcodeOutput.ts",
"retrieved_chunk": " } else {\n // Label\n const label = program.matchedLabels[instruction.label];\n if (!label) {\n log(`Unknown label ${instruction.label}`, instruction.lineNumber);\n return { type: \"none\" };\n }\n console.log(`${label.address.toString(16)}`);\n opcode = label.address;\n }",
"score": 11.176413264878063
},
{
"filename": "src/lib/bass.ts",
"retrieved_chunk": " stringLength: number;\n };\n};\nexport type LiteralInstruction = InstructionBase & {\n type: \"literal\";\n};\n/**\n * Parses a single line of a BASS architecture file\n * @param line The line being parsed\n * @param lineNumber The one-based index of the line being processed",
"score": 11.064324291538282
},
{
"filename": "src/lib/opcodeOutput.ts",
"retrieved_chunk": " instruction.immediate\n );\n break;\n }\n case \"label\": {\n const label = program.matchedLabels[instruction.label];\n if (!label) {\n log(`Unknown label ${instruction.label}`, instruction.lineNumber);\n return { type: \"none\" };\n }",
"score": 10.245822545617361
}
] | typescript | matches = labelRegex.exec(line); |
import fs, { readFileSync, writeFileSync } from "fs";
import { argv } from "process";
import readline from "readline";
import events from "events";
import { InstructionSet, parseArchLine } from "./lib/bass";
import { parseNumber } from "./lib/util";
import * as path from "path";
import { AssembledProgram } from "./lib/types";
import { commentRegex, labelRegex } from "./lib/regex";
import { outputInstructions } from "./lib/opcodeOutput";
import { log } from "./lib/log";
import { readArch, readByLines } from "./lib/fs";
interface CommandEntry {
regex: RegExp;
action: (
line: { line: string; lineNumber: number },
matches: RegExpExecArray,
program: AssembledProgram
) => void;
}
// The commands supported by the assembler (separate from opcodes)
const commands: CommandEntry[] = [
{
regex: /origin\s+((?:0x)?[a-f0-9]+)/,
action: ({ lineNumber }, [_2, address], program) => {
if (address === undefined) {
log("Could not parse origin", lineNumber);
return;
}
program.currentAddress = parseNumber(address);
},
},
{
regex: /constant\s+(?:(0x[a-f0-9]+|[0-9]+)|([a-z0-9_]+))/,
action: ({ line, lineNumber }, [_, constant, label], program) => {
const address = program.currentAddress;
if (constant !== undefined) {
const value = parseNumber(constant);
if (value > 4095) {
log(
`Constant ${constant} is too large to fit into 12 bits`,
lineNumber
);
return;
}
program.matchedInstructions.push({
type: "constant",
subtype: "literal",
value,
line,
lineNumber,
address,
});
} else if (label !== undefined) {
program.matchedInstructions.push({
type: "constant",
subtype: "label",
label,
line,
lineNumber,
address,
});
} else {
log("Unknown constant error", lineNumber);
return;
}
program.currentAddress += 1;
},
},
];
const parseAsmLine = (
line: string,
lineNumber: number,
instructionSet: InstructionSet,
program: AssembledProgram
) => {
if (line.length == 0 || line.startsWith("//") || line.startsWith(";")) {
// Comment. Skip
return;
}
for (const command of commands) {
const matches = command.regex.exec(line);
if (!!matches && matches.length > 0) {
command.action({ lineNumber, line }, matches, program);
return;
}
}
let hasInstruction = false;
// Match line against all known instructions from the BASS arch
for (const instruction of instructionSet.instructions) {
const matches = instruction.regex.exec(line);
const address = program.currentAddress;
if (!!matches && matches.length > 0) {
if (matches[1] !== undefined) {
// immediate
if (instruction.type !== "immediate") {
log(
"Attempted to match content with non-immediate instruction",
lineNumber
);
return;
}
program.matchedInstructions.push({
type: "immediate",
line,
immediate: parseNumber(matches[1]),
opcodeString: instruction.opcodeString,
bitCount: instruction.immediate.bitCount,
lineNumber,
address,
});
} else if (matches[2] !== undefined) {
// potential label
if (instruction.type !== "immediate") {
log(
"Attempted to match content with non-immediate instruction",
lineNumber
);
return;
}
program.matchedInstructions.push({
type: "label",
line,
label: matches[2],
opcodeString: instruction.opcodeString,
bitCount: instruction.immediate.bitCount,
lineNumber,
address,
});
} else {
// literal only
program.matchedInstructions.push({
type: "literal",
line,
opcodeString: instruction.opcodeString,
lineNumber,
address,
});
}
hasInstruction = true;
program.currentAddress += 1;
break;
}
}
if (hasInstruction && program.unmatchedLabels.length > 0) {
// Add queued labels
for (const label of program.unmatchedLabels) {
const existingLabel = program.matchedLabels[label.label];
if (existingLabel) {
log(
`Label "${label.label}" already exists. Was created on line ${existingLabel.lineNumber}`,
lineNumber
);
return;
}
program.matchedLabels[label.label] = {
lineNumber,
instructionIndex: program.matchedInstructions.length - 1,
address: program.currentAddress - 1,
};
}
// We've processed all labels
program.unmatchedLabels = [];
}
let lineWithoutLabel = line;
const matches = labelRegex.exec(line);
if (!!matches && matches.length > 0 && matches[1]) {
lineWithoutLabel =
lineWithoutLabel.substring(0, matches.index) +
lineWithoutLabel.substring(matches.index + matches[0].length);
const label = matches[1];
const existingLabel = program.matchedLabels[label];
if (existingLabel) {
log(
`Label "${label}" already exists. Was created on line ${existingLabel.lineNumber}`,
lineNumber
);
return;
}
if (hasInstruction) {
// Instruction on this line, pair them up
program.matchedLabels[label] = {
lineNumber,
instructionIndex: program.matchedInstructions.length - 1,
address: program.currentAddress - 1,
};
} else {
// Will pair with some future instruction. Queue it
program.unmatchedLabels.push({
label,
lineNumber,
});
}
}
lineWithoutLabel = lineWithoutLabel.replace(commentRegex, "").trim();
if (!hasInstruction && lineWithoutLabel.length > 0) {
log(`Unknown instruction "${lineWithoutLabel}"`, lineNumber);
}
};
if (argv.length != 4 && argv.length != 5) {
console.log(`Received ${argv.length - 2} arguments. Expected 2-3\n`);
console.log(
"Usage: node assembler.js [input.asm] [output.bin] {true|false: 12 bit output}"
);
process.exit(1);
}
const archPath = path.join(__dirname, "../bass/6200.arch");
const inputFile = argv[2] as string;
const outputFile = argv[3] as string;
const word16Align = argv[4] !== "true";
const build = async () => {
const program: AssembledProgram = {
currentAddress: 0,
matchedInstructions: [],
matchedLabels: {},
unmatchedLabels: [],
};
const instructionSet = await readArch(archPath);
await readByLines(inputFile, | (line, lineNumber) =>
parseAsmLine(line, lineNumber, instructionSet, program)
); |
const outputBuffer = outputInstructions(program, word16Align);
if (outputBuffer.type === "some") {
writeFileSync(outputFile, outputBuffer.value);
} else {
console.log("Could not generate output binary");
}
};
build();
| src/assembler.ts | agg23-tamagotchi-disassembled-421eacb | [
{
"filename": "src/lib/fs.ts",
"retrieved_chunk": " await readByLines(path, (line, lineNumber) =>\n parseArchLine(line, lineNumber, instructionSet)\n );\n return instructionSet;\n};",
"score": 37.43083165632357
},
{
"filename": "src/disassembler.ts",
"retrieved_chunk": "const archPath = path.join(__dirname, \"../bass/6200.arch\");\nconst inputFile = argv[2] as string;\nconst outputFile = argv[3] as string;\nconst build = async () => {\n const instructionSet = await readArch(archPath);\n const sortedInstructions = instructionSet.instructions.sort(\n (a, b) => a.sortableOpcode - b.sortableOpcode\n );\n const buffer = readFileSync(inputFile);\n const outputString = parseBinaryBuffer(buffer, sortedInstructions);",
"score": 24.918671886661354
},
{
"filename": "src/lib/fs.ts",
"retrieved_chunk": " path: string,\n onLine: (line: string, lineNumber: number) => void\n) => {\n const rl = readline.createInterface({\n input: fs.createReadStream(path),\n crlfDelay: Infinity,\n });\n let lineNumber = 0;\n rl.on(\"line\", (line) => onLine(line.toLowerCase().trim(), ++lineNumber));\n await events.once(rl, \"close\");",
"score": 19.403992921265832
},
{
"filename": "src/lib/types.ts",
"retrieved_chunk": " >;\n matchedLabels: {\n [name: string]: {\n lineNumber: number;\n instructionIndex: number;\n address: number;\n };\n };\n unmatchedLabels: Array<{\n label: string;",
"score": 13.720784938778515
},
{
"filename": "src/lib/bass.ts",
"retrieved_chunk": " * @param config The global instruction set config\n * @returns\n */\nexport const parseArchLine = (\n line: string,\n lineNumber: number,\n config: InstructionSet\n) => {\n if (line.length == 0 || line.startsWith(\"//\") || line.startsWith(\"#\")) {\n // Comment. Skip",
"score": 12.483696738909522
}
] | typescript | (line, lineNumber) =>
parseAsmLine(line, lineNumber, instructionSet, program)
); |
import fs, { readFileSync, writeFileSync } from "fs";
import { argv } from "process";
import readline from "readline";
import events from "events";
import { InstructionSet, parseArchLine } from "./lib/bass";
import { parseNumber } from "./lib/util";
import * as path from "path";
import { AssembledProgram } from "./lib/types";
import { commentRegex, labelRegex } from "./lib/regex";
import { outputInstructions } from "./lib/opcodeOutput";
import { log } from "./lib/log";
import { readArch, readByLines } from "./lib/fs";
interface CommandEntry {
regex: RegExp;
action: (
line: { line: string; lineNumber: number },
matches: RegExpExecArray,
program: AssembledProgram
) => void;
}
// The commands supported by the assembler (separate from opcodes)
const commands: CommandEntry[] = [
{
regex: /origin\s+((?:0x)?[a-f0-9]+)/,
action: ({ lineNumber }, [_2, address], program) => {
if (address === undefined) {
log("Could not parse origin", lineNumber);
return;
}
program.currentAddress = parseNumber(address);
},
},
{
regex: /constant\s+(?:(0x[a-f0-9]+|[0-9]+)|([a-z0-9_]+))/,
action: ({ line, lineNumber }, [_, constant, label], program) => {
const address = program.currentAddress;
if (constant !== undefined) {
const value = parseNumber(constant);
if (value > 4095) {
log(
`Constant ${constant} is too large to fit into 12 bits`,
lineNumber
);
return;
}
program.matchedInstructions.push({
type: "constant",
subtype: "literal",
value,
line,
lineNumber,
address,
});
} else if (label !== undefined) {
program.matchedInstructions.push({
type: "constant",
subtype: "label",
label,
line,
lineNumber,
address,
});
} else {
log("Unknown constant error", lineNumber);
return;
}
program.currentAddress += 1;
},
},
];
const parseAsmLine = (
line: string,
lineNumber: number,
instructionSet: InstructionSet,
program: AssembledProgram
) => {
if (line.length == 0 || line.startsWith("//") || line.startsWith(";")) {
// Comment. Skip
return;
}
for (const command of commands) {
const matches = command.regex.exec(line);
if (!!matches && matches.length > 0) {
command.action({ lineNumber, line }, matches, program);
return;
}
}
let hasInstruction = false;
// Match line against all known instructions from the BASS arch
for (const instruction of instructionSet.instructions) {
const matches = instruction.regex.exec(line);
const address = program.currentAddress;
if (!!matches && matches.length > 0) {
if (matches[1] !== undefined) {
// immediate
if (instruction.type !== "immediate") {
log(
"Attempted to match content with non-immediate instruction",
lineNumber
);
return;
}
program.matchedInstructions.push({
type: "immediate",
line,
immediate: parseNumber(matches[1]),
opcodeString: instruction.opcodeString,
bitCount: instruction.immediate.bitCount,
lineNumber,
address,
});
} else if (matches[2] !== undefined) {
// potential label
if (instruction.type !== "immediate") {
log(
"Attempted to match content with non-immediate instruction",
lineNumber
);
return;
}
program.matchedInstructions.push({
type: "label",
line,
label: matches[2],
opcodeString: instruction.opcodeString,
bitCount: instruction.immediate.bitCount,
lineNumber,
address,
});
} else {
// literal only
program.matchedInstructions.push({
type: "literal",
line,
opcodeString: instruction.opcodeString,
lineNumber,
address,
});
}
hasInstruction = true;
program.currentAddress += 1;
break;
}
}
if (hasInstruction && program.unmatchedLabels.length > 0) {
// Add queued labels
for (const label of program.unmatchedLabels) {
const existingLabel = program.matchedLabels[label.label];
if (existingLabel) {
log(
`Label "${label.label}" already exists. Was created on line ${existingLabel.lineNumber}`,
lineNumber
);
return;
}
program.matchedLabels[label.label] = {
lineNumber,
instructionIndex: program.matchedInstructions.length - 1,
address: program.currentAddress - 1,
};
}
// We've processed all labels
program.unmatchedLabels = [];
}
let lineWithoutLabel = line;
const matches = labelRegex.exec(line);
if (!!matches && matches.length > 0 && matches[1]) {
lineWithoutLabel =
lineWithoutLabel.substring(0, matches.index) +
lineWithoutLabel.substring(matches.index + matches[0].length);
const label = matches[1];
const existingLabel = program.matchedLabels[label];
if (existingLabel) {
log(
`Label "${label}" already exists. Was created on line ${existingLabel.lineNumber}`,
lineNumber
);
return;
}
if (hasInstruction) {
// Instruction on this line, pair them up
program.matchedLabels[label] = {
lineNumber,
instructionIndex: program.matchedInstructions.length - 1,
address: program.currentAddress - 1,
};
} else {
// Will pair with some future instruction. Queue it
program.unmatchedLabels.push({
label,
lineNumber,
});
}
}
lineWithoutLabel = lineWithoutLabel.replace(commentRegex, "").trim();
if (!hasInstruction && lineWithoutLabel.length > 0) {
log(`Unknown instruction "${lineWithoutLabel}"`, lineNumber);
}
};
if (argv.length != 4 && argv.length != 5) {
console.log(`Received ${argv.length - 2} arguments. Expected 2-3\n`);
console.log(
"Usage: node assembler.js [input.asm] [output.bin] {true|false: 12 bit output}"
);
process.exit(1);
}
const archPath = path.join(__dirname, "../bass/6200.arch");
const inputFile = argv[2] as string;
const outputFile = argv[3] as string;
const word16Align = argv[4] !== "true";
const build = async () => {
const program: AssembledProgram = {
currentAddress: 0,
matchedInstructions: [],
matchedLabels: {},
unmatchedLabels: [],
};
const instructionSet = await readArch(archPath);
await readByLines(inputFile, (line, lineNumber) =>
parseAsmLine(line, lineNumber, instructionSet, program)
);
| const outputBuffer = outputInstructions(program, word16Align); |
if (outputBuffer.type === "some") {
writeFileSync(outputFile, outputBuffer.value);
} else {
console.log("Could not generate output binary");
}
};
build();
| src/assembler.ts | agg23-tamagotchi-disassembled-421eacb | [
{
"filename": "src/lib/fs.ts",
"retrieved_chunk": " await readByLines(path, (line, lineNumber) =>\n parseArchLine(line, lineNumber, instructionSet)\n );\n return instructionSet;\n};",
"score": 37.43083165632357
},
{
"filename": "src/disassembler.ts",
"retrieved_chunk": "const archPath = path.join(__dirname, \"../bass/6200.arch\");\nconst inputFile = argv[2] as string;\nconst outputFile = argv[3] as string;\nconst build = async () => {\n const instructionSet = await readArch(archPath);\n const sortedInstructions = instructionSet.instructions.sort(\n (a, b) => a.sortableOpcode - b.sortableOpcode\n );\n const buffer = readFileSync(inputFile);\n const outputString = parseBinaryBuffer(buffer, sortedInstructions);",
"score": 26.54730879419354
},
{
"filename": "src/lib/fs.ts",
"retrieved_chunk": " path: string,\n onLine: (line: string, lineNumber: number) => void\n) => {\n const rl = readline.createInterface({\n input: fs.createReadStream(path),\n crlfDelay: Infinity,\n });\n let lineNumber = 0;\n rl.on(\"line\", (line) => onLine(line.toLowerCase().trim(), ++lineNumber));\n await events.once(rl, \"close\");",
"score": 20.17793402022554
},
{
"filename": "src/lib/opcodeOutput.ts",
"retrieved_chunk": " instruction.immediate\n );\n break;\n }\n case \"label\": {\n const label = program.matchedLabels[instruction.label];\n if (!label) {\n log(`Unknown label ${instruction.label}`, instruction.lineNumber);\n return { type: \"none\" };\n }",
"score": 15.122474921581205
},
{
"filename": "src/lib/opcodeOutput.ts",
"retrieved_chunk": " program: AssembledProgram,\n word16Align: boolean\n): Option<Buffer> => {\n // This buffer stores each nibble of the program separately, and we will combine this later into the output buffer\n const threeNibbleBuffer: number[] = new Array(8192 * 3);\n // Fill array with 0xF\n for (let i = 0; i < threeNibbleBuffer.length; i++) {\n threeNibbleBuffer[i] = 0xf;\n }\n for (const instruction of program.matchedInstructions) {",
"score": 14.626999033072403
}
] | typescript | const outputBuffer = outputInstructions(program, word16Align); |
import { ImmediateInstruction, Instruction } from "./bass";
import { buildDisassembledInstructionString } from "./display";
import { DisassembledInstruction } from "./types";
import { maskOfSize } from "./util";
export const parseBinaryBuffer = (
buffer: Buffer,
instructions: Instruction[]
): string => {
const disassembledInstructions: DisassembledInstruction[] = [];
const unsetLabels: Array<DisassembledInstruction[] | undefined> = new Array(
8192
);
for (let i = 0; i < buffer.length; i += 2) {
const highByte = buffer[i]!;
const lowByte = buffer[i + 1]!;
const address = i / 2;
const correctedWord = (highByte << 8) | lowByte;
const instruction = findWordInstruction(correctedWord, instructions);
const disassembledInstruction: DisassembledInstruction = {
instruction,
actualWord: correctedWord,
address,
};
if (isFlowControlWithImmediate(instruction)) {
// Convert local address into global one
const pcLowerByte =
correctedWord & maskOfSize(instruction.immediate.bitCount);
let pcUpperFive = (address >> 8) & 0x1f;
if (isCalz(instruction)) {
// calz is only zero page and prevents pset
pcUpperFive = 0;
} else {
const lastInstruction =
disassembledInstructions[disassembledInstructions.length - 1]!;
if (isPset(lastInstruction.instruction)) {
// PSET immediate determines our upper 5 bits
pcUpperFive = lastInstruction.actualWord & 0x1f;
}
}
const pc = (pcUpperFive << 8) | pcLowerByte;
const existingLabel = unsetLabels[pc];
if (existingLabel) {
existingLabel.push(disassembledInstruction);
} else {
unsetLabels[pc] = [disassembledInstruction];
}
}
disassembledInstructions.push(disassembledInstruction);
}
// Build label names
let labelCount = 0;
const namedLabels: Array<
| {
name: string;
instructions: DisassembledInstruction[];
}
| undefined
> = unsetLabels.map((instructions) => {
if (!!instructions) {
return {
name: `label_${labelCount++}`,
instructions,
};
}
return undefined;
});
  // Build list of instructions that will replace the immediates with these labels, and build labels
const labelUsageMap: Array<string | undefined> = new Array(8192);
for (const namedLabel of namedLabels) {
if (namedLabel) {
for (const instruction of namedLabel.instructions) {
labelUsageMap[instruction.address] = namedLabel.name;
}
}
}
let output = "";
let address = 0;
for (const instruction of disassembledInstructions) {
const immediateLabel = labelUsageMap[instruction.address];
const lineLabel = namedLabels[instruction.address];
if (lineLabel) {
output += `\n${lineLabel.name}:\n`;
}
output += ` ${buildDisassembledInstructionString(
instruction,
immediateLabel
)}\n`;
address += 1;
}
return output;
};
const findWordInstruction = (word: number, instructions: Instruction[]) => {
// Naive because it doesn't really matter
let bestMatch = instructions[0]!;
for (let i = 0; i < instructions.length; i++) {
const instruction = instructions[i]!;
if (instruction.sortableOpcode <= word) {
bestMatch = instruction;
} else {
// We've passed the best solution, end
break;
}
}
return bestMatch;
};
const flowControlImmediateMnemonics = ((): Set<string> =>
new Set<string>(["call", "calz", "jp"]))();
const extractMnemonic = (instruction: Instruction): string =>
instruction.originalInstruction.split(/\s/)[0]!.trim();
const isFlowControlWithImmediate = (
instruction: Instruction
| ): instruction is ImmediateInstruction => { |
const mnemonic = extractMnemonic(instruction);
return flowControlImmediateMnemonics.has(mnemonic);
};
const isPset = (instruction: Instruction): boolean => {
const mnemonic = extractMnemonic(instruction);
return mnemonic === "pset";
};
const isCalz = (instruction: Instruction) => {
const mnemonic = extractMnemonic(instruction);
return mnemonic === "calz";
};
| src/lib/disassembly.ts | agg23-tamagotchi-disassembled-421eacb | [
{
"filename": "src/lib/bass.ts",
"retrieved_chunk": " instructions: Array<Instruction>;\n}\nexport type Instruction = ImmediateInstruction | LiteralInstruction;\nexport interface InstructionBase {\n regex: RegExp;\n opcodeString: string;\n sortableOpcode: number;\n originalInstruction: string;\n}\nexport type ImmediateInstruction = InstructionBase & {",
"score": 22.633176213694416
},
{
"filename": "src/lib/bass.ts",
"retrieved_chunk": " });\n }\n};\nconst buildSortableOpcode = (template: string, bitCount: number) =>\n buildOpcode(template, bitCount, 0);\nconst cleanAndFinishInstructionRegex = (instruction: string): RegExp => {\n const cleaned = instruction\n .trim()\n .replace(whitespaceRegex, whitespaceRegex.source);\n // Force nothing but whitespace from beginning of string to instruction",
"score": 18.452483495609915
},
{
"filename": "src/lib/display.ts",
"retrieved_chunk": "import { DisassembledInstruction } from \"./types\";\nimport { isLetterChar, maskOfSize } from \"./util\";\nexport const buildDisassembledInstructionString = (\n { instruction, actualWord, address }: DisassembledInstruction,\n immediateLabel: string | undefined\n) => {\n let instructionString = instruction.originalInstruction;\n if (instruction.type === \"immediate\") {\n const { bitCount, stringIndex, stringLength } = instruction.immediate;\n const immediatePrefix = instructionString.substring(0, stringIndex);",
"score": 15.915806302823105
},
{
"filename": "src/lib/bass.ts",
"retrieved_chunk": " const [originalInstruction, opcode] = sections;\n if (!originalInstruction || !opcode) {\n log(\"Unknown input\", lineNumber);\n return;\n }\n const opcodeString = opcode.trim();\n let numberMatch = originalInstruction.match(bassNumberRegex);\n if (!!numberMatch && numberMatch.index) {\n // This instruction contains a star followed by a number\n // This is an immediate",
"score": 15.528304125800386
},
{
"filename": "src/lib/bass.ts",
"retrieved_chunk": " // Force nothing but whitespace and a comment from instruction to end of string\n return new RegExp(\n instructionPrefixRegex.source + cleaned + instructionSuffixRegex.source\n );\n};",
"score": 13.264426306034595
}
] | typescript | ): instruction is ImmediateInstruction => { |
import {
bassNumberRegex,
instructionPrefixRegex,
instructionSuffixRegex,
whitespaceRegex,
} from "./regex";
import { log } from "./log";
import { parseNumber } from "./util";
import { buildOpcode } from "./opcodeOutput";
export interface InstructionSet {
instructions: Array<Instruction>;
}
export type Instruction = ImmediateInstruction | LiteralInstruction;
export interface InstructionBase {
regex: RegExp;
opcodeString: string;
sortableOpcode: number;
originalInstruction: string;
}
export type ImmediateInstruction = InstructionBase & {
type: "immediate";
immediate: {
bitCount: number;
/**
* The index in the originalInstruction the immediate occurs
*/
stringIndex: number;
/**
* The length of the immediate in the originalInstruction string
*/
stringLength: number;
};
};
export type LiteralInstruction = InstructionBase & {
type: "literal";
};
/**
* Parses a single line of a BASS architecture file
* @param line The line being parsed
* @param lineNumber The one-based index of the line being processed
* @param config The global instruction set config
* @returns
*/
export const parseArchLine = (
line: string,
lineNumber: number,
config: InstructionSet
) => {
if (line.length == 0 || line.startsWith("//") || line.startsWith("#")) {
// Comment. Skip
return;
}
const sections = line.split(";");
if (sections.length != 2) {
log(
"Unexpected semicolon. Does this instruction have an output?",
lineNumber
);
return;
}
const [originalInstruction, opcode] = sections;
if (!originalInstruction || !opcode) {
log("Unknown input", lineNumber);
return;
}
const opcodeString = opcode.trim();
let numberMatch = originalInstruction.match(bassNumberRegex);
if (!!numberMatch && numberMatch.index) {
// This instruction contains a star followed by a number
// This is an immediate
const matchString = numberMatch[0];
// This is guaranteed to exist due to the regex
| const bitCount = parseNumber(numberMatch[1]!); |
const index = numberMatch.index;
const instructionLine =
originalInstruction.substring(0, index) +
"(?:(0x[a-f0-9]+|[0-9]+)|([a-z0-9_]+))" +
originalInstruction.substring(index + matchString.length);
const sortableOpcode = buildSortableOpcode(opcodeString, bitCount);
config.instructions.push({
type: "immediate",
regex: cleanAndFinishInstructionRegex(instructionLine),
immediate: {
bitCount,
stringIndex: index,
stringLength: matchString.length,
},
opcodeString,
sortableOpcode,
originalInstruction: originalInstruction.trim(),
});
} else {
// This is a literal
const sortableOpcode = buildSortableOpcode(opcodeString, 0);
config.instructions.push({
type: "literal",
regex: cleanAndFinishInstructionRegex(originalInstruction),
opcodeString,
sortableOpcode,
originalInstruction: originalInstruction.trim(),
});
}
};
const buildSortableOpcode = (template: string, bitCount: number) =>
buildOpcode(template, bitCount, 0);
const cleanAndFinishInstructionRegex = (instruction: string): RegExp => {
const cleaned = instruction
.trim()
.replace(whitespaceRegex, whitespaceRegex.source);
// Force nothing but whitespace from beginning of string to instruction
// Force nothing but whitespace and a comment from instruction to end of string
return new RegExp(
instructionPrefixRegex.source + cleaned + instructionSuffixRegex.source
);
};
| src/lib/bass.ts | agg23-tamagotchi-disassembled-421eacb | [
{
"filename": "src/extractIcons.ts",
"retrieved_chunk": " for (let i = 0; i < buffer.length; i += 2) {\n // Skip the low byte of every word\n const highNibble = buffer[i]! & 0xf;\n if (highNibble === 0x9) {\n // LBPX\n // This is probably a set of pixels for an image\n lbpxCount += 1;\n } else if (highNibble === 0x1 && lbpxCount > 0) {\n // RETD\n // We have some number of possible pixels, so consider this a complete image write",
"score": 22.17796304088524
},
{
"filename": "src/lib/opcodeOutput.ts",
"retrieved_chunk": " * @param template The opcode template from the BASS arch file\n * @param argSize The number of bits in an argument to the opcode, if any\n * @param argument The actual data to pass as an argument to the opcode, if any\n * @returns The output opcode as a 12 bit word\n */\nexport const buildOpcode = (\n template: string,\n argSize: number,\n argument: number\n) => {",
"score": 20.278700625662502
},
{
"filename": "src/extractIcons.ts",
"retrieved_chunk": " // Load the second byte of the word, which is where the immediate is\n const byte = romBuffer[image.address + x * 2 + 1]!;\n const imageCoord = (y * IMAGE_WIDTH + x) * 4;\n if (((byte >> y) & 0x1) !== 0) {\n // Pixel at x, y is on\n // Array is initialized to 0, so we can just toggle the alpha value to display black\n imageBuffer[imageCoord + 3] = 0xff;\n } else {\n imageBuffer[imageCoord + 3] = 0;\n }",
"score": 18.159260896346677
},
{
"filename": "src/assembler.ts",
"retrieved_chunk": " const matches = instruction.regex.exec(line);\n const address = program.currentAddress;\n if (!!matches && matches.length > 0) {\n if (matches[1] !== undefined) {\n // immediate\n if (instruction.type !== \"immediate\") {\n log(\n \"Attempted to match content with non-immediate instruction\",\n lineNumber\n );",
"score": 18.01177777085752
},
{
"filename": "src/assembler.ts",
"retrieved_chunk": " return;\n }\n program.matchedInstructions.push({\n type: \"immediate\",\n line,\n immediate: parseNumber(matches[1]),\n opcodeString: instruction.opcodeString,\n bitCount: instruction.immediate.bitCount,\n lineNumber,\n address,",
"score": 16.334662715884075
}
] | typescript | const bitCount = parseNumber(numberMatch[1]!); |
import { ImmediateInstruction, Instruction } from "./bass";
import { buildDisassembledInstructionString } from "./display";
import { DisassembledInstruction } from "./types";
import { maskOfSize } from "./util";
export const parseBinaryBuffer = (
buffer: Buffer,
instructions: Instruction[]
): string => {
const disassembledInstructions: DisassembledInstruction[] = [];
const unsetLabels: Array<DisassembledInstruction[] | undefined> = new Array(
8192
);
for (let i = 0; i < buffer.length; i += 2) {
const highByte = buffer[i]!;
const lowByte = buffer[i + 1]!;
const address = i / 2;
const correctedWord = (highByte << 8) | lowByte;
const instruction = findWordInstruction(correctedWord, instructions);
const disassembledInstruction: DisassembledInstruction = {
instruction,
actualWord: correctedWord,
address,
};
if (isFlowControlWithImmediate(instruction)) {
// Convert local address into global one
const pcLowerByte =
correctedWord & maskOfSize(instruction.immediate.bitCount);
let pcUpperFive = (address >> 8) & 0x1f;
if (isCalz(instruction)) {
// calz is only zero page and prevents pset
pcUpperFive = 0;
} else {
const lastInstruction =
disassembledInstructions[disassembledInstructions.length - 1]!;
if (isPset(lastInstruction.instruction)) {
// PSET immediate determines our upper 5 bits
pcUpperFive = lastInstruction.actualWord & 0x1f;
}
}
const pc = (pcUpperFive << 8) | pcLowerByte;
const existingLabel = unsetLabels[pc];
if (existingLabel) {
existingLabel.push(disassembledInstruction);
} else {
unsetLabels[pc] = [disassembledInstruction];
}
}
disassembledInstructions.push(disassembledInstruction);
}
// Build label names
let labelCount = 0;
const namedLabels: Array<
| {
name: string;
instructions: DisassembledInstruction[];
}
| undefined
> = unsetLabels.map((instructions) => {
if (!!instructions) {
return {
name: `label_${labelCount++}`,
instructions,
};
}
return undefined;
});
  // Build list of instructions that will replace the immediates with these labels, and build labels
const labelUsageMap: Array<string | undefined> = new Array(8192);
for (const namedLabel of namedLabels) {
if (namedLabel) {
for (const instruction of namedLabel.instructions) {
labelUsageMap[instruction.address] = namedLabel.name;
}
}
}
let output = "";
let address = 0;
for (const instruction of disassembledInstructions) {
const immediateLabel = labelUsageMap[instruction.address];
const lineLabel = namedLabels[instruction.address];
if (lineLabel) {
output += `\n${lineLabel.name}:\n`;
}
output += | ` ${buildDisassembledInstructionString(
instruction,
immediateLabel
)}\n`; |
address += 1;
}
return output;
};
const findWordInstruction = (word: number, instructions: Instruction[]) => {
// Naive because it doesn't really matter
let bestMatch = instructions[0]!;
for (let i = 0; i < instructions.length; i++) {
const instruction = instructions[i]!;
if (instruction.sortableOpcode <= word) {
bestMatch = instruction;
} else {
// We've passed the best solution, end
break;
}
}
return bestMatch;
};
const flowControlImmediateMnemonics = ((): Set<string> =>
new Set<string>(["call", "calz", "jp"]))();
const extractMnemonic = (instruction: Instruction): string =>
instruction.originalInstruction.split(/\s/)[0]!.trim();
const isFlowControlWithImmediate = (
instruction: Instruction
): instruction is ImmediateInstruction => {
const mnemonic = extractMnemonic(instruction);
return flowControlImmediateMnemonics.has(mnemonic);
};
const isPset = (instruction: Instruction): boolean => {
const mnemonic = extractMnemonic(instruction);
return mnemonic === "pset";
};
const isCalz = (instruction: Instruction) => {
const mnemonic = extractMnemonic(instruction);
return mnemonic === "calz";
};
| src/lib/disassembly.ts | agg23-tamagotchi-disassembled-421eacb | [
{
"filename": "src/lib/display.ts",
"retrieved_chunk": "import { DisassembledInstruction } from \"./types\";\nimport { isLetterChar, maskOfSize } from \"./util\";\nexport const buildDisassembledInstructionString = (\n { instruction, actualWord, address }: DisassembledInstruction,\n immediateLabel: string | undefined\n) => {\n let instructionString = instruction.originalInstruction;\n if (instruction.type === \"immediate\") {\n const { bitCount, stringIndex, stringLength } = instruction.immediate;\n const immediatePrefix = instructionString.substring(0, stringIndex);",
"score": 19.66240845311435
},
{
"filename": "src/assembler.ts",
"retrieved_chunk": " console.log(`Received ${argv.length - 2} arguments. Expected 2-3\\n`);\n console.log(\n \"Usage: node assembler.js [input.asm] [output.bin] {true|false: 12 bit output}\"\n );\n process.exit(1);\n}\nconst archPath = path.join(__dirname, \"../bass/6200.arch\");\nconst inputFile = argv[2] as string;\nconst outputFile = argv[3] as string;\nconst word16Align = argv[4] !== \"true\";",
"score": 14.643519038263667
},
{
"filename": "src/lib/display.ts",
"retrieved_chunk": " const immediateSuffix = instructionString.substring(\n stringIndex + stringLength\n );\n let immediate = \"\";\n if (immediateLabel) {\n immediate = immediateLabel;\n } else {\n const argument = maskOfSize(bitCount) & actualWord;\n if (isLetterChar(immediatePrefix.charAt(immediatePrefix.length - 1))) {\n // If letter, treat as decimal",
"score": 13.147531500008625
},
{
"filename": "src/extractIcons.ts",
"retrieved_chunk": " console.log(`Received ${argv.length - 2} arguments. Expected 1\\n`);\n console.log(\"Usage: node extractIcons.js [input.bin]\");\n process.exit(1);\n}\nconst inputFile = argv[2] as string;\nconst build = async () => {\n const buffer = readFileSync(inputFile);\n generateImages(buffer);\n};\nbuild();",
"score": 11.239118571313584
},
{
"filename": "src/lib/bass.ts",
"retrieved_chunk": " return;\n }\n const sections = line.split(\";\");\n if (sections.length != 2) {\n log(\n \"Unexpected semicolon. Does this instruction have an output?\",\n lineNumber\n );\n return;\n }",
"score": 11.227061435935084
}
] | typescript | ` ${buildDisassembledInstructionString(
instruction,
immediateLabel
)}\n`; |
import { DisassembledInstruction } from "./types";
import { isLetterChar, maskOfSize } from "./util";
export const buildDisassembledInstructionString = (
{ instruction, actualWord, address }: DisassembledInstruction,
immediateLabel: string | undefined
) => {
let instructionString = instruction.originalInstruction;
if (instruction.type === "immediate") {
const { bitCount, stringIndex, stringLength } = instruction.immediate;
const immediatePrefix = instructionString.substring(0, stringIndex);
const immediateSuffix = instructionString.substring(
stringIndex + stringLength
);
let immediate = "";
if (immediateLabel) {
immediate = immediateLabel;
} else {
const argument = maskOfSize(bitCount) & actualWord;
if (isLetterChar(immediatePrefix.charAt(immediatePrefix.length - 1))) {
// If letter, treat as decimal
immediate = argument.toString();
} else {
// Otherwise, treat as hex
immediate = `0x${argument.toString(16).toUpperCase()}`;
}
}
instructionString = `${immediatePrefix}${immediate}${immediateSuffix}`;
}
// Separate out instruction so that it formats nicely
// Four total columns
// Opcode - Source - Dest - Comments
const splitInstruction = instructionString.split(/\s+/);
let lastPadWidth = 0;
for (let i = 2; i >= splitInstruction.length - 1; i--) {
lastPadWidth += columnPadWidth(i);
}
const formattedInstructionString = splitInstruction
. | map((s, i) => { |
const pad =
i === splitInstruction.length - 1 ? lastPadWidth : columnPadWidth(i);
return s.padEnd(pad);
})
.join("");
const comment = `// 0x${address
.toString(16)
.toUpperCase()
.padEnd(4)} (0x${actualWord.toString(16).toUpperCase()})`;
return `${formattedInstructionString.padEnd(81)}${comment}`;
};
const columnPadWidth = (column: number) => {
switch (column) {
case 0:
return 6;
case 1:
return 5;
case 2:
return 10;
}
return 0;
};
| src/lib/display.ts | agg23-tamagotchi-disassembled-421eacb | [
{
"filename": "src/lib/disassembly.ts",
"retrieved_chunk": " 8192\n );\n for (let i = 0; i < buffer.length; i += 2) {\n const highByte = buffer[i]!;\n const lowByte = buffer[i + 1]!;\n const address = i / 2;\n const correctedWord = (highByte << 8) | lowByte;\n const instruction = findWordInstruction(correctedWord, instructions);\n const disassembledInstruction: DisassembledInstruction = {\n instruction,",
"score": 31.687472349835627
},
{
"filename": "src/lib/opcodeOutput.ts",
"retrieved_chunk": "): Buffer => {\n const bufferSize = word16Align ? 8192 * 2 : (8192 * 3) / 2;\n const buffer = Buffer.alloc(bufferSize);\n let byteBuffer = 0;\n let bufferAddress = 0;\n let lowNibble = false;\n let evenByte = true;\n for (let i = 0; i < threeNibbleBuffer.length; i++) {\n const nibble = threeNibbleBuffer[i]!;\n const writeSpacerValue = word16Align && !lowNibble && evenByte;",
"score": 30.337544956587326
},
{
"filename": "src/lib/opcodeOutput.ts",
"retrieved_chunk": " let index = 0;\n let outputWord = 0;\n while (index < template.length) {\n const char = template[index];\n if (char === \"%\") {\n // Consume chars until whitespace\n let data = 0;\n let count = 0;\n for (let i = 1; i < Math.min(13, template.length - index); i++) {\n const nextChar = template[index + i]!;",
"score": 28.923054110029593
},
{
"filename": "src/lib/disassembly.ts",
"retrieved_chunk": "};\nconst findWordInstruction = (word: number, instructions: Instruction[]) => {\n // Naive because it doesn't really matter\n let bestMatch = instructions[0]!;\n for (let i = 0; i < instructions.length; i++) {\n const instruction = instructions[i]!;\n if (instruction.sortableOpcode <= word) {\n bestMatch = instruction;\n } else {\n // We've passed the best solution, end",
"score": 25.81040250225652
},
{
"filename": "src/extractIcons.ts",
"retrieved_chunk": " for (let i = 0; i < buffer.length; i += 2) {\n // Skip the low byte of every word\n const highNibble = buffer[i]! & 0xf;\n if (highNibble === 0x9) {\n // LBPX\n // This is probably a set of pixels for an image\n lbpxCount += 1;\n } else if (highNibble === 0x1 && lbpxCount > 0) {\n // RETD\n // We have some number of possible pixels, so consider this a complete image write",
"score": 23.940133111362258
}
] | typescript | map((s, i) => { |
import { DisassembledInstruction } from "./types";
import { isLetterChar, maskOfSize } from "./util";
export const buildDisassembledInstructionString = (
{ instruction, actualWord, address }: DisassembledInstruction,
immediateLabel: string | undefined
) => {
let instructionString = instruction.originalInstruction;
if (instruction.type === "immediate") {
const { bitCount, stringIndex, stringLength } = instruction.immediate;
const immediatePrefix = instructionString.substring(0, stringIndex);
const immediateSuffix = instructionString.substring(
stringIndex + stringLength
);
let immediate = "";
if (immediateLabel) {
immediate = immediateLabel;
} else {
const argument = maskOfSize(bitCount) & actualWord;
if (isLetterChar(immediatePrefix.charAt(immediatePrefix.length - 1))) {
// If letter, treat as decimal
immediate = argument.toString();
} else {
// Otherwise, treat as hex
immediate = `0x${argument.toString(16).toUpperCase()}`;
}
}
instructionString = `${immediatePrefix}${immediate}${immediateSuffix}`;
}
// Separate out instruction so that it formats nicely
// Four total columns
// Opcode - Source - Dest - Comments
const splitInstruction = instructionString.split(/\s+/);
let lastPadWidth = 0;
for (let i = 2; i >= splitInstruction.length - 1; i--) {
lastPadWidth += columnPadWidth(i);
}
const formattedInstructionString = splitInstruction
.map | ((s, i) => { |
const pad =
i === splitInstruction.length - 1 ? lastPadWidth : columnPadWidth(i);
return s.padEnd(pad);
})
.join("");
const comment = `// 0x${address
.toString(16)
.toUpperCase()
.padEnd(4)} (0x${actualWord.toString(16).toUpperCase()})`;
return `${formattedInstructionString.padEnd(81)}${comment}`;
};
const columnPadWidth = (column: number) => {
switch (column) {
case 0:
return 6;
case 1:
return 5;
case 2:
return 10;
}
return 0;
};
| src/lib/display.ts | agg23-tamagotchi-disassembled-421eacb | [
{
"filename": "src/lib/disassembly.ts",
"retrieved_chunk": " 8192\n );\n for (let i = 0; i < buffer.length; i += 2) {\n const highByte = buffer[i]!;\n const lowByte = buffer[i + 1]!;\n const address = i / 2;\n const correctedWord = (highByte << 8) | lowByte;\n const instruction = findWordInstruction(correctedWord, instructions);\n const disassembledInstruction: DisassembledInstruction = {\n instruction,",
"score": 31.687472349835627
},
{
"filename": "src/lib/opcodeOutput.ts",
"retrieved_chunk": "): Buffer => {\n const bufferSize = word16Align ? 8192 * 2 : (8192 * 3) / 2;\n const buffer = Buffer.alloc(bufferSize);\n let byteBuffer = 0;\n let bufferAddress = 0;\n let lowNibble = false;\n let evenByte = true;\n for (let i = 0; i < threeNibbleBuffer.length; i++) {\n const nibble = threeNibbleBuffer[i]!;\n const writeSpacerValue = word16Align && !lowNibble && evenByte;",
"score": 30.337544956587326
},
{
"filename": "src/lib/opcodeOutput.ts",
"retrieved_chunk": " let index = 0;\n let outputWord = 0;\n while (index < template.length) {\n const char = template[index];\n if (char === \"%\") {\n // Consume chars until whitespace\n let data = 0;\n let count = 0;\n for (let i = 1; i < Math.min(13, template.length - index); i++) {\n const nextChar = template[index + i]!;",
"score": 28.923054110029593
},
{
"filename": "src/lib/disassembly.ts",
"retrieved_chunk": "};\nconst findWordInstruction = (word: number, instructions: Instruction[]) => {\n // Naive because it doesn't really matter\n let bestMatch = instructions[0]!;\n for (let i = 0; i < instructions.length; i++) {\n const instruction = instructions[i]!;\n if (instruction.sortableOpcode <= word) {\n bestMatch = instruction;\n } else {\n // We've passed the best solution, end",
"score": 25.81040250225652
},
{
"filename": "src/extractIcons.ts",
"retrieved_chunk": " for (let i = 0; i < buffer.length; i += 2) {\n // Skip the low byte of every word\n const highNibble = buffer[i]! & 0xf;\n if (highNibble === 0x9) {\n // LBPX\n // This is probably a set of pixels for an image\n lbpxCount += 1;\n } else if (highNibble === 0x1 && lbpxCount > 0) {\n // RETD\n // We have some number of possible pixels, so consider this a complete image write",
"score": 23.940133111362258
}
] | typescript | ((s, i) => { |
import { Message, PromptFunctions, PromptMemory, RenderedPromptSection, Tokenizer } from "./types";
import { PromptSectionBase } from "./PromptSectionBase";
import { Utilities } from "./Utilities";
/**
* A section that renders the conversation history.
*/
export class ConversationHistory extends PromptSectionBase {
public readonly variable: string;
public readonly userPrefix: string;
public readonly assistantPrefix: string;
/**
* Creates a new 'ConversationHistory' instance.
     * @param variable Name of memory variable used to store the history's `Message[]`.
* @param tokens Optional. Sizing strategy for this section. Defaults to `proportional` with a value of `1.0`.
* @param required Optional. Indicates if this section is required. Defaults to `false`.
* @param userPrefix Optional. Prefix to use for user messages when rendering as text. Defaults to `user: `.
* @param assistantPrefix Optional. Prefix to use for assistant messages when rendering as text. Defaults to `assistant: `.
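     * @param separator Optional. Separator to use between messages when rendering as text. Defaults to `\n`.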
*/
public constructor(variable: string, tokens: number = 1.0, required: boolean = false, userPrefix: string = 'user: ', assistantPrefix: string = 'assistant: ', separator: string = '\n') {
super(tokens, required, separator);
this.variable = variable;
this.userPrefix = userPrefix;
this.assistantPrefix = assistantPrefix;
}
public async renderAsText(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<string>> {
// Get messages from memory
const history: Message[] = memory.has(this.variable) ? (memory.get(this.variable) as Message[]).slice() : [];
// Populate history and stay under the token budget
let tokens = 0;
const budget = this.tokens > 1.0 ? Math.min(this.tokens, maxTokens) : maxTokens;
const separatorLength = tokenizer.encode(this.separator).length;
const lines: string[] = [];
for (let i = history.length - 1; i >= 0; i--) {
const msg = history[i];
| const message: Message = { role: msg.role, content: Utilities.toString(tokenizer, msg.content) }; |
const prefix = message.role === 'user' ? this.userPrefix : this.assistantPrefix;
const line = prefix + message.content;
const length = tokenizer.encode(line).length + (lines.length > 0 ? separatorLength : 0);
// Add initial line if required
if (lines.length === 0 && this.required) {
tokens += length;
lines.unshift(line);
continue;
}
// Stop if we're over the token budget
if (tokens + length > budget) {
break;
}
// Add line
tokens += length;
lines.unshift(line);
}
return { output: lines.join(this.separator), length: tokens, tooLong: tokens > maxTokens };
}
public async renderAsMessages(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<Message[]>> {
// Get messages from memory
const history: Message[] = memory.has(this.variable) ? (memory.get(this.variable) as Message[]).slice() : [];
// Populate messages and stay under the token budget
let tokens = 0;
const budget = this.tokens > 1.0 ? Math.min(this.tokens, maxTokens) : maxTokens;
const messages: Message[] = [];
for (let i = history.length - 1; i >= 0; i--) {
// Clone message
const msg = history[i];
const message: Message = Object.assign({}, msg);
if (msg.content !== null) {
message.content = Utilities.toString(tokenizer, msg.content);
}
// Get message length
const length = tokenizer.encode(PromptSectionBase.getMessageText(message)).length;
// Add initial message if required
if (messages.length === 0 && this.required) {
tokens += length;
messages.unshift(message);
continue;
}
// Stop if we're over the token budget
if (tokens + length > budget) {
break;
}
// Add message
tokens += length;
messages.unshift(message);
}
return { output: messages, length: tokens, tooLong: tokens > maxTokens };
}
} | src/ConversationHistory.ts | Stevenic-promptrix-4a210d8 | [
{
"filename": "src/PromptSectionBase.ts",
"retrieved_chunk": " public abstract renderAsMessages(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<Message[]>>;\n protected returnMessages(output: Message[], length: number, tokenizer: Tokenizer, maxTokens: number): RenderedPromptSection<Message[]> {\n // Truncate if fixed length\n if (this.tokens > 1.0) {\n while (length > this.tokens) {\n const msg = output.pop();\n const encoded = tokenizer.encode(PromptSectionBase.getMessageText(msg!));\n length -= encoded.length;\n if (length < this.tokens) {\n const delta = this.tokens - length;",
"score": 68.00913023623285
},
{
"filename": "src/PromptSectionBase.ts",
"retrieved_chunk": " const truncated = tokenizer.decode(encoded.slice(0, delta));\n output.push({ role: msg!.role, content: truncated });\n length += delta;\n }\n }\n }\n return { output: output, length: length, tooLong: length > maxTokens };\n }\n public static getMessageText(message: Message): string {\n let text = message.content ?? '';",
"score": 59.392389002369875
},
{
"filename": "src/LayoutEngine.ts",
"retrieved_chunk": " maxTokens,\n (section) => section.renderAsMessages(memory, functions, tokenizer, maxTokens),\n (section, remaining) => section.renderAsMessages(memory, functions, tokenizer, remaining)\n );\n // Build output\n const output: Message[] = [];\n for (let i = 0; i < layout.length; i++) {\n const section = layout[i];\n if (section.layout) {\n output.push(...section.layout.output);",
"score": 55.745753689564104
},
{
"filename": "src/LayoutEngine.ts",
"retrieved_chunk": " return tokenizer.encode(output.join(this.separator)).length;\n } else {\n let length = 0;\n for (let i = 0; i < layout.length; i++) {\n const section = layout[i];\n if (section.layout) {\n length += section.layout.length;\n }\n }\n return length;",
"score": 55.51025166515105
},
{
"filename": "src/PromptSectionBase.ts",
"retrieved_chunk": " let length = prefixLength + asMessages.length + ((asMessages.output.length - 1) * separatorLength);\n // Truncate if fixed length\n text = this.textPrefix + text;\n if (this.tokens > 1.0 && length > this.tokens) {\n const encoded = tokenizer.encode(text);\n text = tokenizer.decode(encoded.slice(0, this.tokens));\n length = this.tokens;\n }\n return { output: text, length: length, tooLong: length > maxTokens };\n }",
"score": 53.14244635528218
}
] | typescript | const message: Message = { role: msg.role, content: Utilities.toString(tokenizer, msg.content) }; |
import { Message, PromptFunctions, PromptMemory, PromptSection, RenderedPromptSection, Tokenizer } from "./types";
/**
* Abstract Base class for most prompt sections.
*/
export abstract class PromptSectionBase implements PromptSection {
public readonly required: boolean;
public readonly tokens: number;
public readonly separator: string;
public readonly textPrefix: string;
/**
* Creates a new 'PromptSectionBase' instance.
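     * @remarks
     * Sizing note (inferred from the truncation logic in this class): a `tokens` value greater
     * than `1.0` acts as a fixed token cap and output is truncated to fit it, a value of `1.0`
     * or less leaves budgeting to the caller, and `-1` requests automatic sizing.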
* @param tokens Optional. Sizing strategy for this section. Defaults to `auto`.
* @param required Optional. Indicates if this section is required. Defaults to `true`.
* @param separator Optional. Separator to use between sections when rendering as text. Defaults to `\n`.
     * @param textPrefix Optional. Prefix to use for text output. Defaults to an empty string.
*/
public constructor(tokens: number = -1, required: boolean = true, separator: string = '\n', textPrefix: string = '') {
this.required = required;
this.tokens = tokens;
this.separator = separator;
this.textPrefix = textPrefix;
}
public async renderAsText(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<string>> {
// Render as messages
const asMessages = await this.renderAsMessages(memory, functions, tokenizer, maxTokens);
// Convert to text
let text = asMessages.output.map((message) => PromptSectionBase.getMessageText(message)).join(this.separator);
// Calculate length
const prefixLength = tokenizer.encode(this.textPrefix).length;
const separatorLength = tokenizer.encode(this.separator).length;
let length = prefixLength + asMessages.length + ((asMessages.output.length - 1) * separatorLength);
// Truncate if fixed length
text = this.textPrefix + text;
if (this.tokens > 1.0 && length > this.tokens) {
const encoded = tokenizer.encode(text);
text = tokenizer.decode(encoded.slice(0, this.tokens));
length = this.tokens;
}
return { output: text, length: length, tooLong: length > maxTokens };
}
public abstract renderAsMessages(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<Message[]>>;
protected returnMessages( | output: Message[], length: number, tokenizer: Tokenizer, maxTokens: number): RenderedPromptSection<Message[]> { |
// Truncate if fixed length
if (this.tokens > 1.0) {
while (length > this.tokens) {
const msg = output.pop();
const encoded = tokenizer.encode(PromptSectionBase.getMessageText(msg!));
length -= encoded.length;
if (length < this.tokens) {
const delta = this.tokens - length;
const truncated = tokenizer.decode(encoded.slice(0, delta));
output.push({ role: msg!.role, content: truncated });
length += delta;
}
}
}
return { output: output, length: length, tooLong: length > maxTokens };
}
public static getMessageText(message: Message): string {
let text = message.content ?? '';
if (message.function_call) {
text = JSON.stringify(message.function_call);
} else if (message.name) {
text = `${message.name} returned ${text}`;
}
return text;
}
} | src/PromptSectionBase.ts | Stevenic-promptrix-4a210d8 | [
{
"filename": "src/TextSection.ts",
"retrieved_chunk": " this.text = text;\n this.role = role;\n }\n public async renderAsMessages(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<Message[]>> {\n // Calculate and cache length\n if (this._length < 0) {\n this._length = tokenizer.encode(this.text).length;\n }\n // Return output\n return this.returnMessages([{ role: this.role, content: this.text }], this._length, tokenizer, maxTokens);",
"score": 70.93183063947342
},
{
"filename": "src/ConversationHistory.ts",
"retrieved_chunk": " lines.unshift(line);\n }\n return { output: lines.join(this.separator), length: tokens, tooLong: tokens > maxTokens };\n }\n public async renderAsMessages(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<Message[]>> {\n // Get messages from memory\n const history: Message[] = memory.has(this.variable) ? (memory.get(this.variable) as Message[]).slice() : [];\n // Populate messages and stay under the token budget\n let tokens = 0;\n const budget = this.tokens > 1.0 ? Math.min(this.tokens, maxTokens) : maxTokens;",
"score": 66.6569770123997
},
{
"filename": "src/FunctionCallMessage.ts",
"retrieved_chunk": " this.function_call = function_call;\n }\n public async renderAsMessages(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<Message[]>> {\n // Calculate and cache response text and length\n if (this._length < 0) {\n this._length = tokenizer.encode(JSON.stringify(this.function_call)).length;\n }\n // Return output\n return this.returnMessages([{ role: 'assistant', content: null, function_call: this.function_call }], this._length, tokenizer, maxTokens);\n }",
"score": 64.69466790143787
},
{
"filename": "src/GroupSection.ts",
"retrieved_chunk": " super(tokens, required, separator, textPrefix);\n this._layoutEngine = new LayoutEngine(sections, tokens, required, separator);\n this.sections = sections;\n this.role = role;\n }\n public async renderAsMessages(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<Message[]>> {\n // Render sections to text\n const { output, length, tooLong } = await this._layoutEngine.renderAsText(memory, functions, tokenizer, maxTokens);\n // Return output as a single message\n return this.returnMessages([{ role: this.role, content: output }], length, tokenizer, maxTokens);",
"score": 63.3220709138135
},
{
"filename": "src/ConversationHistory.ts",
"retrieved_chunk": " this.variable = variable;\n this.userPrefix = userPrefix;\n this.assistantPrefix = assistantPrefix;\n }\n public async renderAsText(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<string>> {\n // Get messages from memory\n const history: Message[] = memory.has(this.variable) ? (memory.get(this.variable) as Message[]).slice() : [];\n // Populate history and stay under the token budget\n let tokens = 0;\n const budget = this.tokens > 1.0 ? Math.min(this.tokens, maxTokens) : maxTokens;",
"score": 60.685651557019575
}
] | typescript | output: Message[], length: number, tokenizer: Tokenizer, maxTokens: number): RenderedPromptSection<Message[]> { |
import { strict as assert } from "assert";
import { Message, PromptFunctions, PromptMemory, RenderedPromptSection, Tokenizer } from "./types";
import { PromptSectionBase } from "./PromptSectionBase";
import { VolatileMemory } from "./VolatileMemory";
import { FunctionRegistry } from "./FunctionRegistry";
import { GPT3Tokenizer } from "./GPT3Tokenizer";
export class TestSection extends PromptSectionBase {
public async renderAsMessages(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<Message[]>> {
return this.returnMessages([{ role: 'test', content: 'Hello Big World' }], 3, tokenizer, maxTokens);
}
}
export class MultiTestSection extends PromptSectionBase {
public async renderAsMessages(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<Message[]>> {
return this.returnMessages([{ role: 'test', content: 'Hello Big' },{ role: 'test', content: 'World' }], 3, tokenizer, maxTokens);
}
}
describe("PromptSectionBase", () => {
const memory = new VolatileMemory();
const functions = new FunctionRegistry();
const tokenizer = new GPT3Tokenizer();
describe("constructor", () => {
it("should create a TestSection", () => {
const section = new TestSection();
assert.equal(section.tokens, -1);
assert.equal(section.required, true);
assert.equal(section.separator, "\n");
assert.equal(section.textPrefix, "");
});
});
describe("renderAsMessages", () => {
it("should render a TestSection to an array of messages", async () => {
const section = new TestSection();
const rendered = await section.renderAsMessages(memory, functions, tokenizer, 100);
assert.deepEqual(rendered.output, [{ role: "test", content: "Hello Big World" }]);
assert.equal(rendered.length, 3);
assert.equal(rendered.tooLong, false);
});
it("should truncate a fixed length TestSection", async () => {
| const section = new TestSection(2); |
const rendered = await section.renderAsMessages(memory, functions, tokenizer, 100);
assert.deepEqual(rendered.output, [{ role: "test", content: "Hello Big" }]);
assert.equal(rendered.length, 2);
assert.equal(rendered.tooLong, false);
});
it("should identify a fixed length TestSection as being tooLong", async () => {
const section = new TestSection(2);
const rendered = await section.renderAsMessages(memory, functions, tokenizer, 1);
assert.deepEqual(rendered.output, [{ role: "test", content: "Hello Big" }]);
assert.equal(rendered.length, 2);
assert.equal(rendered.tooLong, true);
});
it("should drop messages to truncate a fixed length MultiTestSection", async () => {
const section = new MultiTestSection(2);
const rendered = await section.renderAsMessages(memory, functions, tokenizer, 100);
assert.deepEqual(rendered.output, [{ role: "test", content: "Hello Big" }]);
assert.equal(rendered.length, 2);
assert.equal(rendered.tooLong, false);
});
});
describe("renderAsText", () => {
it("should render a TestSection to a string", async () => {
const section = new TestSection();
const rendered = await section.renderAsText(memory, functions, tokenizer, 100);
assert.equal(rendered.output, "Hello Big World");
assert.equal(rendered.length, 3);
assert.equal(rendered.tooLong, false);
});
it("should truncate a fixed length TestSection", async () => {
const section = new TestSection(4, true, "\n", "user: ");
const rendered = await section.renderAsText(memory, functions, tokenizer, 100);
assert.equal(rendered.output, "user: Hello Big");
assert.equal(rendered.length, 4);
assert.equal(rendered.tooLong, false);
});
it("should identify a fixed length TestSection as being tooLong", async () => {
const section = new TestSection(4, true, "\n", "user: ");
const rendered = await section.renderAsText(memory, functions, tokenizer, 1);
assert.equal(rendered.output, "user: Hello Big");
assert.equal(rendered.length, 4);
assert.equal(rendered.tooLong, true);
});
});
});
| src/PromptSectionBase.spec.ts | Stevenic-promptrix-4a210d8 | [
{
"filename": "src/UserMessage.spec.ts",
"retrieved_chunk": " describe(\"renderAsMessages\", () => {\n it(\"should render a UserMessage to an array of messages\", async () => {\n const section = new UserMessage(\"Hello World\");\n const rendered = await section.renderAsMessages(memory, functions, tokenizer, 100);\n assert.deepEqual(rendered.output, [{ role: \"user\", content: \"Hello World\" }]);\n assert.equal(rendered.length, 2);\n assert.equal(rendered.tooLong, false);\n });\n });\n describe(\"renderAsText\", () => {",
"score": 50.00574974095733
},
{
"filename": "src/AssistantMessage.spec.ts",
"retrieved_chunk": " describe(\"renderAsMessages\", () => {\n it(\"should render a AssistantMessage to an array of messages\", async () => {\n const section = new AssistantMessage(\"Hello World\");\n const rendered = await section.renderAsMessages(memory, functions, tokenizer, 100);\n assert.deepEqual(rendered.output, [{ role: \"assistant\", content: \"Hello World\" }]);\n assert.equal(rendered.length, 2);\n assert.equal(rendered.tooLong, false);\n });\n });\n describe(\"renderAsText\", () => {",
"score": 50.00574974095733
},
{
"filename": "src/GroupSection.spec.ts",
"retrieved_chunk": " });\n describe(\"renderAsMessages\", () => {\n it(\"should render a TextSection to an array of messages\", async () => {\n const section = new GroupSection([\n new TextSection(\"Hello World\", \"user\")\n ]);\n const rendered = await section.renderAsMessages(memory, functions, tokenizer, 100);\n assert.deepEqual(rendered.output, [{ role: \"system\", content: \"Hello World\" }]);\n assert.equal(rendered.length, 2);\n assert.equal(rendered.tooLong, false);",
"score": 49.81347013191888
},
{
"filename": "src/TemplateSection.spec.ts",
"retrieved_chunk": " assert.equal(section.separator, \"\\n\");\n });\n });\n describe(\"renderAsMessages\", () => {\n it(\"should render a TemplateSection to an array of messages\", async () => {\n const section = new TemplateSection(\"Hello World\", \"user\");\n const rendered = await section.renderAsMessages(memory, functions, tokenizer, 100);\n assert.deepEqual(rendered.output, [{ role: \"user\", content: \"Hello World\" }]);\n assert.equal(rendered.length, 2);\n assert.equal(rendered.tooLong, false);",
"score": 48.74471769040215
},
{
"filename": "src/Prompt.spec.ts",
"retrieved_chunk": " describe(\"renderAsMessages\", () => {\n it(\"should render a TextSection to an array of messages\", async () => {\n const prompt = new Prompt([\n new TextSection(\"Hello World\", \"user\")\n ]);\n const rendered = await prompt.renderAsMessages(memory, functions, tokenizer, 100);\n assert.deepEqual(rendered.output, [{ role: \"user\", content: \"Hello World\" }]);\n assert.equal(rendered.length, 2);\n assert.equal(rendered.tooLong, false);\n });",
"score": 48.06582324622677
}
] | typescript | const section = new TestSection(2); |
import { strict as assert } from "assert";
import { Message, PromptFunctions, PromptMemory, RenderedPromptSection, Tokenizer } from "./types";
import { PromptSectionBase } from "./PromptSectionBase";
import { VolatileMemory } from "./VolatileMemory";
import { FunctionRegistry } from "./FunctionRegistry";
import { GPT3Tokenizer } from "./GPT3Tokenizer";
export class TestSection extends PromptSectionBase {
public async renderAsMessages(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<Message[]>> {
return this.returnMessages([{ role: 'test', content: 'Hello Big World' }], 3, tokenizer, maxTokens);
}
}
export class MultiTestSection extends PromptSectionBase {
public async renderAsMessages(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<Message[]>> {
return this.returnMessages([{ role: 'test', content: 'Hello Big' },{ role: 'test', content: 'World' }], 3, tokenizer, maxTokens);
}
}
describe("PromptSectionBase", () => {
const memory = new VolatileMemory();
const functions = new FunctionRegistry();
const tokenizer = new GPT3Tokenizer();
describe("constructor", () => {
it("should create a TestSection", () => {
const section = new TestSection();
assert.equal(section.tokens, -1);
assert.equal(section.required, true);
assert.equal(section.separator, "\n");
assert.equal(section.textPrefix, "");
});
});
describe("renderAsMessages", () => {
it("should render a TestSection to an array of messages", async () => {
const section = new TestSection();
const rendered = await section.renderAsMessages(memory, functions, tokenizer, 100);
assert.deepEqual(rendered.output, [{ role: "test", content: "Hello Big World" }]);
assert.equal(rendered.length, 3);
assert.equal(rendered.tooLong, false);
});
it("should truncate a fixed length TestSection", async () => {
const section = new TestSection(2);
const rendered = await section.renderAsMessages(memory, functions, tokenizer, 100);
assert.deepEqual(rendered.output, [{ role: "test", content: "Hello Big" }]);
assert.equal(rendered.length, 2);
assert.equal(rendered.tooLong, false);
});
it("should identify a fixed length TestSection as being tooLong", async () => {
const section = new TestSection(2);
const rendered = await section.renderAsMessages(memory, functions, tokenizer, 1);
assert.deepEqual(rendered.output, [{ role: "test", content: "Hello Big" }]);
assert.equal(rendered.length, 2);
assert.equal(rendered.tooLong, true);
});
it("should drop messages to truncate a fixed length MultiTestSection", async () => {
const section = new MultiTestSection(2);
const rendered = await section.renderAsMessages(memory, functions, tokenizer, 100);
assert.deepEqual(rendered.output, [{ role: "test", content: "Hello Big" }]);
assert.equal(rendered.length, 2);
assert.equal(rendered.tooLong, false);
});
});
describe("renderAsText", () => {
it("should render a TestSection to a string", async () => {
const section = new TestSection();
const rendered = await section.renderAsText(memory, functions, tokenizer, 100);
assert.equal(rendered.output, "Hello Big World");
assert.equal(rendered.length, 3);
assert.equal(rendered.tooLong, false);
});
it("should truncate a fixed length TestSection", async () => {
| const section = new TestSection(4, true, "\n", "user: "); |
const rendered = await section.renderAsText(memory, functions, tokenizer, 100);
assert.equal(rendered.output, "user: Hello Big");
assert.equal(rendered.length, 4);
assert.equal(rendered.tooLong, false);
});
it("should identify a fixed length TestSection as being tooLong", async () => {
const section = new TestSection(4, true, "\n", "user: ");
const rendered = await section.renderAsText(memory, functions, tokenizer, 1);
assert.equal(rendered.output, "user: Hello Big");
assert.equal(rendered.length, 4);
assert.equal(rendered.tooLong, true);
});
});
});
| src/PromptSectionBase.spec.ts | Stevenic-promptrix-4a210d8 | [
{
"filename": "src/TemplateSection.spec.ts",
"retrieved_chunk": " assert.equal(rendered.output, \"Hello Big World\");\n assert.equal(rendered.length, 3);\n assert.equal(rendered.tooLong, false);\n });\n it(\"should render a template with a {{function}} and backtick arguments\", async () => {\n const section = new TemplateSection(\"Hello {{test2 `Big World`}}\", \"user\");\n const rendered = await section.renderAsText(memory, functions, tokenizer, 100);\n assert.equal(rendered.output, \"Hello Big World\");\n assert.equal(rendered.length, 3);\n assert.equal(rendered.tooLong, false);",
"score": 42.82746707562052
},
{
"filename": "src/TemplateSection.spec.ts",
"retrieved_chunk": " });\n it(\"should render a template with a {{function}} and multiple arguments\", async () => {\n const section = new TemplateSection(\"Hello {{test3 'Big' World}}\", \"user\");\n const rendered = await section.renderAsText(memory, functions, tokenizer, 100);\n assert.equal(rendered.output, \"Hello Big World\");\n assert.equal(rendered.length, 3);\n assert.equal(rendered.tooLong, false);\n });\n it(\"should skip {{}} empty template params\", async () => {\n const section = new TemplateSection(\"{{}}\", \"user\");",
"score": 41.24333475720256
},
{
"filename": "src/GroupSection.spec.ts",
"retrieved_chunk": " assert.equal(rendered.tooLong, true);\n });\n it(\"should render multiple TextSections to a string\", async () => {\n const section = new GroupSection([\n new TextSection(\"Hello\", \"user\"),\n new TextSection(\"World\", \"user\")\n ]);\n const rendered = await section.renderAsText(memory, functions, tokenizer, 100);\n assert.equal(rendered.output, \"Hello\\n\\nWorld\");\n assert.equal(rendered.length, 4);",
"score": 40.94788236878069
},
{
"filename": "src/GroupSection.spec.ts",
"retrieved_chunk": " assert.equal(rendered.tooLong, false);\n });\n });\n describe(\"renderAsText\", () => {\n it(\"should render a TextSection to a string\", async () => {\n const section = new GroupSection([\n new TextSection(\"Hello World\", \"user\")\n ]);\n const rendered = await section.renderAsText(memory, functions, tokenizer, 100);\n assert.equal(rendered.output, \"Hello World\");",
"score": 39.556847382958566
},
{
"filename": "src/Prompt.spec.ts",
"retrieved_chunk": " });\n it(\"should render multiple TextSections to a string\", async () => {\n const prompt = new Prompt([\n new TextSection(\"Hello\", \"user\"),\n new TextSection(\"World\", \"user\")\n ]);\n const rendered = await prompt.renderAsText(memory, functions, tokenizer, 100);\n assert.equal(rendered.output, \"Hello\\n\\nWorld\");\n assert.equal(rendered.length, 4);\n assert.equal(rendered.tooLong, false);",
"score": 39.16227588381133
}
] | typescript | const section = new TestSection(4, true, "\n", "user: "); |
import { strict as assert } from "assert";
import { FunctionRegistry } from "./FunctionRegistry";
import { VolatileMemory } from "./VolatileMemory";
import { GPT3Tokenizer } from "./GPT3Tokenizer";
describe("FunctionRegistry", () => {
describe("constructor", () => {
it("should create a FunctionRegistry", () => {
const registry = new FunctionRegistry();
assert.notEqual(registry, null);
assert.equal(registry.has("test"), false);
});
it("should create a FunctionRegistry with initial functions", () => {
const registry = new FunctionRegistry({
"test": async (memory, functions, tokenizer, args) => { }
});
assert.notEqual(registry, null);
assert.equal(registry.has("test"), true);
});
});
describe("addFunction", () => {
it("should add a function", () => {
const registry = new FunctionRegistry();
registry.addFunction("test", async (memory, functions, tokenizer, args) => { });
assert.equal(registry.has("test"), true);
});
it("should throw when adding a function that already exists", () => {
const registry = new FunctionRegistry({
"test": async (memory, functions, tokenizer, args) => { }
});
assert.throws(() => registry.addFunction("test", async (memory, functions, tokenizer, args) => { }));
});
});
describe("get", () => {
it("should get a function", () => {
const registry = new FunctionRegistry({
"test": async (memory, functions, tokenizer, args) => { }
});
const fn = registry.get("test");
assert.notEqual(fn, null);
});
it("should throw when getting a function that doesn't exist", () => {
const registry = new FunctionRegistry();
assert.throws(() => registry.get("test"));
});
});
describe("has", () => {
it("should return false when a function doesn't exist", () => {
const registry = new FunctionRegistry();
assert.equal(registry.has("test"), false);
});
it("should return true when a function exists", () => {
const registry = new FunctionRegistry({
"test": async (memory, functions, tokenizer, args) => { }
});
assert.equal(registry.has("test"), true);
});
});
describe("invoke", () => {
const memory = new VolatileMemory();
const tokenizer = new GPT3Tokenizer();
it("should invoke a function", async () => {
let called = false;
const registry = new FunctionRegistry({
"test": async (memory, functions, tokenizer, args) => {
assert.equal(args.length, 1);
assert.equal(args[0], "Hello World");
called = true;
}
});
await registry | .invoke("test", memory, registry, tokenizer, ["Hello World"]); |
assert.equal(called, true);
});
it("should throw when invoking a function that doesn't exist", () => {
const registry = new FunctionRegistry();
assert.throws(() => registry.invoke("test", memory, registry, tokenizer, ["Hello World"]));
});
});
});
| src/FunctionRegistry.spec.ts | Stevenic-promptrix-4a210d8 | [
{
"filename": "src/TemplateSection.spec.ts",
"retrieved_chunk": " 'test': async (memory, functions, tokenizer, args) => 'Hello World',\n 'test2': async (memory, functions, tokenizer, args) => args[0],\n 'test3': async (memory, functions, tokenizer, args) => args.join(' '),\n });\n const tokenizer = new GPT3Tokenizer();\n describe(\"constructor\", () => {\n it(\"should create a TemplateSection\", () => {\n const section = new TemplateSection(\"Hello World\", \"user\");\n assert.equal(section.template, \"Hello World\");\n assert.equal(section.role, \"user\");",
"score": 38.413893846146095
},
{
"filename": "src/FunctionRegistry.ts",
"retrieved_chunk": " }\n this._functions.set(name, value);\n }\n public invoke(key: string, memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, args: string[]): Promise<any> {\n const fn = this.get(key);\n return fn(memory, functions, tokenizer, args);\n }\n}",
"score": 27.260070488250292
},
{
"filename": "src/TemplateSection.ts",
"retrieved_chunk": " }\n break;\n }\n }\n // Add final part\n savePart();\n // Return renderer\n return async (memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<string> => {\n const value = await functions.invoke(name, memory, functions, tokenizer, args);\n return Utilities.toString(tokenizer, value);",
"score": 23.947198537348797
},
{
"filename": "src/PromptSectionBase.spec.ts",
"retrieved_chunk": "}\nexport class MultiTestSection extends PromptSectionBase {\n public async renderAsMessages(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<Message[]>> {\n return this.returnMessages([{ role: 'test', content: 'Hello Big' },{ role: 'test', content: 'World' }], 3, tokenizer, maxTokens);\n }\n}\ndescribe(\"PromptSectionBase\", () => {\n const memory = new VolatileMemory();\n const functions = new FunctionRegistry();\n const tokenizer = new GPT3Tokenizer();",
"score": 21.532660041388375
},
{
"filename": "src/TemplateSection.ts",
"retrieved_chunk": " } else {\n args.push(part);\n }\n part = '';\n }\n }\n // Parse function name and args\n let part = '';\n let state = ParseState.inText;\n let stringDelim = '';",
"score": 21.06470735424592
}
] | typescript | .invoke("test", memory, registry, tokenizer, ["Hello World"]); |
import { Message, PromptFunctions, PromptMemory, RenderedPromptSection, Tokenizer } from "./types";
import { PromptSectionBase } from "./PromptSectionBase";
import { Utilities } from "./Utilities";
/**
* A section that renders the conversation history.
*/
export class ConversationHistory extends PromptSectionBase {
public readonly variable: string;
public readonly userPrefix: string;
public readonly assistantPrefix: string;
/**
* Creates a new 'ConversationHistory' instance.
* @param variable Name of memory variable used to store the histories `Message[]`.
* @param tokens Optional. Sizing strategy for this section. Defaults to `proportional` with a value of `1.0`.
* @param required Optional. Indicates if this section is required. Defaults to `false`.
* @param userPrefix Optional. Prefix to use for user messages when rendering as text. Defaults to `user: `.
     * @param assistantPrefix Optional. Prefix to use for assistant messages when rendering as text. Defaults to `assistant: `.
     * @param separator Optional. Separator to use between lines when rendering as text. Defaults to `\n`.
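     * @example
     * // Illustrative usage: render the messages stored under the `history` memory variable
     * const section = new ConversationHistory('history', 1.0, false);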
*/
public constructor(variable: string, tokens: number = 1.0, required: boolean = false, userPrefix: string = 'user: ', assistantPrefix: string = 'assistant: ', separator: string = '\n') {
super(tokens, required, separator);
this.variable = variable;
this.userPrefix = userPrefix;
this.assistantPrefix = assistantPrefix;
}
public async renderAsText(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<string>> {
// Get messages from memory
const history: Message[] = memory.has(this.variable) ? (memory.get(this.variable) as Message[]).slice() : [];
// Populate history and stay under the token budget
let tokens = 0;
const budget = this.tokens > 1.0 ? Math.min(this.tokens, maxTokens) : maxTokens;
const separatorLength = tokenizer.encode(this.separator).length;
const lines: string[] = [];
for (let i = history.length - 1; i >= 0; i--) {
const msg = history[i];
const message: Message = { role: msg.role, content: Utilities | .toString(tokenizer, msg.content) }; |
const prefix = message.role === 'user' ? this.userPrefix : this.assistantPrefix;
const line = prefix + message.content;
const length = tokenizer.encode(line).length + (lines.length > 0 ? separatorLength : 0);
// Add initial line if required
if (lines.length === 0 && this.required) {
tokens += length;
lines.unshift(line);
continue;
}
// Stop if we're over the token budget
if (tokens + length > budget) {
break;
}
// Add line
tokens += length;
lines.unshift(line);
}
return { output: lines.join(this.separator), length: tokens, tooLong: tokens > maxTokens };
}
public async renderAsMessages(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<Message[]>> {
// Get messages from memory
const history: Message[] = memory.has(this.variable) ? (memory.get(this.variable) as Message[]).slice() : [];
// Populate messages and stay under the token budget
let tokens = 0;
const budget = this.tokens > 1.0 ? Math.min(this.tokens, maxTokens) : maxTokens;
const messages: Message[] = [];
for (let i = history.length - 1; i >= 0; i--) {
// Clone message
const msg = history[i];
const message: Message = Object.assign({}, msg);
if (msg.content !== null) {
message.content = Utilities.toString(tokenizer, msg.content);
}
// Get message length
const length = tokenizer.encode(PromptSectionBase.getMessageText(message)).length;
// Add initial message if required
if (messages.length === 0 && this.required) {
tokens += length;
messages.unshift(message);
continue;
}
// Stop if we're over the token budget
if (tokens + length > budget) {
break;
}
// Add message
tokens += length;
messages.unshift(message);
}
return { output: messages, length: tokens, tooLong: tokens > maxTokens };
}
} | src/ConversationHistory.ts | Stevenic-promptrix-4a210d8 | [
{
"filename": "src/PromptSectionBase.ts",
"retrieved_chunk": " public abstract renderAsMessages(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<Message[]>>;\n protected returnMessages(output: Message[], length: number, tokenizer: Tokenizer, maxTokens: number): RenderedPromptSection<Message[]> {\n // Truncate if fixed length\n if (this.tokens > 1.0) {\n while (length > this.tokens) {\n const msg = output.pop();\n const encoded = tokenizer.encode(PromptSectionBase.getMessageText(msg!));\n length -= encoded.length;\n if (length < this.tokens) {\n const delta = this.tokens - length;",
"score": 67.24666596141581
},
{
"filename": "src/PromptSectionBase.ts",
"retrieved_chunk": " const truncated = tokenizer.decode(encoded.slice(0, delta));\n output.push({ role: msg!.role, content: truncated });\n length += delta;\n }\n }\n }\n return { output: output, length: length, tooLong: length > maxTokens };\n }\n public static getMessageText(message: Message): string {\n let text = message.content ?? '';",
"score": 59.392389002369875
},
{
"filename": "src/LayoutEngine.ts",
"retrieved_chunk": " return tokenizer.encode(output.join(this.separator)).length;\n } else {\n let length = 0;\n for (let i = 0; i < layout.length; i++) {\n const section = layout[i];\n if (section.layout) {\n length += section.layout.length;\n }\n }\n return length;",
"score": 55.51025166515105
},
{
"filename": "src/LayoutEngine.ts",
"retrieved_chunk": " maxTokens,\n (section) => section.renderAsMessages(memory, functions, tokenizer, maxTokens),\n (section, remaining) => section.renderAsMessages(memory, functions, tokenizer, remaining)\n );\n // Build output\n const output: Message[] = [];\n for (let i = 0; i < layout.length; i++) {\n const section = layout[i];\n if (section.layout) {\n output.push(...section.layout.output);",
"score": 54.42216313410072
},
{
"filename": "src/PromptSectionBase.ts",
"retrieved_chunk": " let length = prefixLength + asMessages.length + ((asMessages.output.length - 1) * separatorLength);\n // Truncate if fixed length\n text = this.textPrefix + text;\n if (this.tokens > 1.0 && length > this.tokens) {\n const encoded = tokenizer.encode(text);\n text = tokenizer.decode(encoded.slice(0, this.tokens));\n length = this.tokens;\n }\n return { output: text, length: length, tooLong: length > maxTokens };\n }",
"score": 53.14244635528218
}
] | typescript | .toString(tokenizer, msg.content) }; |
import { strict as assert } from "assert";
import { ConversationHistory } from "./ConversationHistory";
import { VolatileMemory } from "./VolatileMemory";
import { FunctionRegistry } from "./FunctionRegistry";
import { GPT3Tokenizer } from "./GPT3Tokenizer";
describe("ConversationHistory", () => {
const memory = new VolatileMemory({
"history": [
{ role: "user", content: "Hello" },
{ role: "assistant", content: "Hi" },
],
"longHistory": [
{ role: "user", content: "Hello" },
{ role: "assistant", content: "Hi! How can I help you?" },
{ role: "user", content: "I'd like to book a flight" },
{ role: "assistant", content: "Sure, where would you like to go?" },
]
});
const functions = new FunctionRegistry();
const tokenizer = new GPT3Tokenizer();
describe("constructor", () => {
it("should create a ConversationHistory", () => {
const section = new ConversationHistory('history');
assert.equal(section.variable, 'history');
assert.equal(section.tokens, 1.0);
assert.equal(section.required, false);
assert.equal(section.separator, "\n");
assert.equal(section.userPrefix, "user: ");
assert.equal(section.assistantPrefix, "assistant: ");
assert.equal(section.textPrefix, "");
});
});
describe("renderAsMessages", () => {
it("should render a ConversationHistory to an array of messages", async () => {
const section = new ConversationHistory('history', 100);
const rendered = await section.renderAsMessages(memory, functions, tokenizer, 100);
assert.deepEqual(rendered.output, [
{ role: "user", content: "Hello" },
{ role: "assistant", content: "Hi" },
]);
assert.equal(rendered.length, 2);
assert.equal(rendered.tooLong, false);
});
it("should truncate its output to match available budget", async () => {
const section = new ConversationHistory('history', 1);
const rendered = await section.renderAsMessages(memory, functions, tokenizer, 1);
assert.deepEqual(rendered.output, [
{ role: "assistant", content: "Hi" },
]);
assert.equal(rendered.length, 1);
assert.equal(rendered.tooLong, false);
});
it("should render nothing when there's no history", async () => {
const section = new ConversationHistory('nohistory', 100);
const rendered = await section.renderAsMessages(memory, functions, tokenizer, 100);
assert.deepEqual(rendered.output, []);
assert.equal(rendered.length, 0);
assert.equal(rendered.tooLong, false);
});
it("should render nothing for a long last message", async () => {
const section = new ConversationHistory('longHistory', 100);
const rendered = await section.renderAsMessages(memory, functions, tokenizer, 2);
assert.deepEqual(rendered.output, []);
assert.equal(rendered.length, 0);
assert.equal(rendered.tooLong, false);
});
it("should always render the last message when section is required", async () => {
const section = new | ConversationHistory('longHistory', 100, true); |
const rendered = await section.renderAsMessages(memory, functions, tokenizer, 2);
assert.deepEqual(rendered.output, [
{ role: "assistant", content: "Sure, where would you like to go?" },
]);
assert.equal(rendered.length, 9);
assert.equal(rendered.tooLong, true);
});
});
describe("renderAsText", () => {
it("should render a ConversationHistory to a string", async () => {
const section = new ConversationHistory('history', 100);
const rendered = await section.renderAsText(memory, functions, tokenizer, 100);
assert.equal(rendered.output, "user: Hello\nassistant: Hi");
assert.equal(rendered.length, 8);
assert.equal(rendered.tooLong, false);
});
it("should truncate its output to match available budget", async () => {
const section = new ConversationHistory('history', 1);
const rendered = await section.renderAsText(memory, functions, tokenizer, 4);
assert.equal(rendered.output, "assistant: Hi");
assert.equal(rendered.length, 4);
assert.equal(rendered.tooLong, false);
});
it("should render nothing when there's no history", async () => {
const section = new ConversationHistory('nohistory', 100);
const rendered = await section.renderAsText(memory, functions, tokenizer, 100);
assert.equal(rendered.output, "");
assert.equal(rendered.length, 0);
assert.equal(rendered.tooLong, false);
});
it("should render nothing for a long last message", async () => {
const section = new ConversationHistory('longHistory', 100);
const rendered = await section.renderAsText(memory, functions, tokenizer, 2);
assert.equal(rendered.output, "");
assert.equal(rendered.length, 0);
assert.equal(rendered.tooLong, false);
});
it("should always render the last message when section is required", async () => {
const section = new ConversationHistory('longHistory', 100, true);
const rendered = await section.renderAsText(memory, functions, tokenizer, 2);
assert.equal(rendered.output, "assistant: Sure, where would you like to go?");
assert.equal(rendered.length, 12);
assert.equal(rendered.tooLong, true);
});
});
});
| src/ConversationHistory.spec.ts | Stevenic-promptrix-4a210d8 | [
{
"filename": "src/PromptSectionBase.spec.ts",
"retrieved_chunk": " it(\"should render a TestSection to an array of messages\", async () => {\n const section = new TestSection();\n const rendered = await section.renderAsMessages(memory, functions, tokenizer, 100);\n assert.deepEqual(rendered.output, [{ role: \"test\", content: \"Hello Big World\" }]);\n assert.equal(rendered.length, 3);\n assert.equal(rendered.tooLong, false);\n });\n it(\"should truncate a fixed length TestSection\", async () => {\n const section = new TestSection(2);\n const rendered = await section.renderAsMessages(memory, functions, tokenizer, 100);",
"score": 34.70220168744882
},
{
"filename": "src/GroupSection.spec.ts",
"retrieved_chunk": " it(\"should render a hierarchy of sections to a single message\", async () => {\n const section = new GroupSection([\n new GroupSection([\n new TextSection(\"Hello\", \"user\")\n ]),\n new TextSection(\"World\", \"user\")\n ]);\n const rendered = await section.renderAsMessages(memory, functions, tokenizer, 100);\n assert.deepEqual(rendered.output, [{ role: \"system\", content: \"Hello\\n\\nWorld\" }]);\n assert.equal(rendered.length, 4);",
"score": 33.190407374985654
},
{
"filename": "src/GroupSection.spec.ts",
"retrieved_chunk": " });\n describe(\"renderAsMessages\", () => {\n it(\"should render a TextSection to an array of messages\", async () => {\n const section = new GroupSection([\n new TextSection(\"Hello World\", \"user\")\n ]);\n const rendered = await section.renderAsMessages(memory, functions, tokenizer, 100);\n assert.deepEqual(rendered.output, [{ role: \"system\", content: \"Hello World\" }]);\n assert.equal(rendered.length, 2);\n assert.equal(rendered.tooLong, false);",
"score": 32.83166508455764
},
{
"filename": "src/TextSection.spec.ts",
"retrieved_chunk": " const rendered = await section.renderAsMessages(memory, functions, tokenizer, 100);\n assert.deepEqual(rendered.output, [{ role: \"user\", content: \"Hello World\" }]);\n assert.equal(rendered.length, 2);\n assert.equal(rendered.tooLong, false);\n });\n it(\"should identify a output as being too long\", async () => {\n const section = new TextSection(\"Hello World\", \"user\");\n const rendered = await section.renderAsMessages(memory, functions, tokenizer, 1);\n assert.deepEqual(rendered.output, [{ role: \"user\", content: \"Hello World\" }]);\n assert.equal(rendered.length, 2);",
"score": 32.814313578835375
},
{
"filename": "src/SystemMessage.spec.ts",
"retrieved_chunk": " it(\"should render a SystemMessage to an array of messages\", async () => {\n const section = new SystemMessage(\"Hello World\");\n const rendered = await section.renderAsMessages(memory, functions, tokenizer, 100);\n assert.deepEqual(rendered.output, [{ role: \"system\", content: \"Hello World\" }]);\n assert.equal(rendered.length, 2);\n assert.equal(rendered.tooLong, false);\n });\n });\n describe(\"renderAsText\", () => {\n it(\"should render a TemplateSection to a string\", async () => {",
"score": 32.387066778932564
}
] | typescript | ConversationHistory('longHistory', 100, true); |
import { Message, PromptFunctions, PromptMemory, PromptSection, RenderedPromptSection, Tokenizer } from "./types";
/**
* Abstract Base class for most prompt sections.
*/
export abstract class PromptSectionBase implements PromptSection {
public readonly required: boolean;
public readonly tokens: number;
public readonly separator: string;
public readonly textPrefix: string;
/**
* Creates a new 'PromptSectionBase' instance.
* @param tokens Optional. Sizing strategy for this section. Defaults to `auto`.
* @param required Optional. Indicates if this section is required. Defaults to `true`.
* @param separator Optional. Separator to use between sections when rendering as text. Defaults to `\n`.
     * @param textPrefix Optional. Prefix to use for text output. Defaults to an empty string.
*/
public constructor(tokens: number = -1, required: boolean = true, separator: string = '\n', textPrefix: string = '') {
this.required = required;
this.tokens = tokens;
this.separator = separator;
this.textPrefix = textPrefix;
}
public async renderAsText(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<string>> {
// Render as messages
const asMessages = await this.renderAsMessages(memory, functions, tokenizer, maxTokens);
// Convert to text
let text = asMessages.output.map((message) => PromptSectionBase.getMessageText(message)).join(this.separator);
// Calculate length
const prefixLength = tokenizer.encode(this.textPrefix).length;
const separatorLength = tokenizer.encode(this.separator).length;
let length = prefixLength + asMessages.length + ((asMessages.output.length - 1) * separatorLength);
// Truncate if fixed length
text = this.textPrefix + text;
if (this.tokens > 1.0 && length > this.tokens) {
const encoded = tokenizer.encode(text);
text = tokenizer.decode(encoded.slice(0, this.tokens));
length = this.tokens;
}
return { output: text, length: length, tooLong: length > maxTokens };
}
public abstract renderAsMessages(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<Message[]>>;
protected returnMessages(output: Message[], length: number, tokenizer: Tokenizer, maxTokens: number): RenderedPromptSection<Message[]> {
// Truncate if fixed length
if (this.tokens > 1.0) {
while (length > this.tokens) {
const msg = output.pop();
const encoded = tokenizer.encode(PromptSectionBase.getMessageText(msg!));
length -= encoded.length;
if (length < this.tokens) {
const delta = this.tokens - length;
const truncated = tokenizer.decode(encoded.slice(0, delta));
output.push({ role: msg!.role, content: truncated });
length += delta;
}
}
}
return { output: output, length: length, tooLong: length > maxTokens };
}
public static getMessageText(message: Message): string {
| let text = message.content ?? ''; |
if (message.function_call) {
text = JSON.stringify(message.function_call);
} else if (message.name) {
text = `${message.name} returned ${text}`;
}
return text;
}
} | src/PromptSectionBase.ts | Stevenic-promptrix-4a210d8 | [
{
"filename": "src/ConversationHistory.ts",
"retrieved_chunk": " const messages: Message[] = [];\n for (let i = history.length - 1; i >= 0; i--) {\n // Clone message\n const msg = history[i];\n const message: Message = Object.assign({}, msg);\n if (msg.content !== null) {\n message.content = Utilities.toString(tokenizer, msg.content);\n }\n // Get message length\n const length = tokenizer.encode(PromptSectionBase.getMessageText(message)).length;",
"score": 35.845457018131064
},
{
"filename": "src/ConversationHistory.ts",
"retrieved_chunk": " const separatorLength = tokenizer.encode(this.separator).length;\n const lines: string[] = [];\n for (let i = history.length - 1; i >= 0; i--) {\n const msg = history[i];\n const message: Message = { role: msg.role, content: Utilities.toString(tokenizer, msg.content) };\n const prefix = message.role === 'user' ? this.userPrefix : this.assistantPrefix;\n const line = prefix + message.content;\n const length = tokenizer.encode(line).length + (lines.length > 0 ? separatorLength : 0);\n // Add initial line if required\n if (lines.length === 0 && this.required) {",
"score": 30.058477087006818
},
{
"filename": "src/GroupSection.ts",
"retrieved_chunk": " super(tokens, required, separator, textPrefix);\n this._layoutEngine = new LayoutEngine(sections, tokens, required, separator);\n this.sections = sections;\n this.role = role;\n }\n public async renderAsMessages(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<Message[]>> {\n // Render sections to text\n const { output, length, tooLong } = await this._layoutEngine.renderAsText(memory, functions, tokenizer, maxTokens);\n // Return output as a single message\n return this.returnMessages([{ role: this.role, content: output }], length, tokenizer, maxTokens);",
"score": 23.661765657114692
},
{
"filename": "src/TemplateSection.ts",
"retrieved_chunk": " const text = renderedParts.join('');\n const length = tokenizer.encode(text).length;\n // Return output\n return this.returnMessages([{ role: this.role, content: text }], length, tokenizer, maxTokens);\n }\n private parseTemplate(): void {\n // Parse template\n let part = '';\n let state = ParseState.inText;\n let stringDelim = '';",
"score": 22.738606255807248
},
{
"filename": "src/TextSection.ts",
"retrieved_chunk": " this.text = text;\n this.role = role;\n }\n public async renderAsMessages(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<Message[]>> {\n // Calculate and cache length\n if (this._length < 0) {\n this._length = tokenizer.encode(this.text).length;\n }\n // Return output\n return this.returnMessages([{ role: this.role, content: this.text }], this._length, tokenizer, maxTokens);",
"score": 21.982754583090685
}
] | typescript | let text = message.content ?? ''; |
import { Message, PromptFunctions, PromptMemory, RenderedPromptSection, Tokenizer } from "./types";
import { PromptSectionBase } from "./PromptSectionBase";
import { Utilities } from "./Utilities";
/**
* A template section that will be rendered as a message.
* @remarks
* This section type is used to render a template as a message. The template can contain
* parameters that will be replaced with values from memory or call functions to generate
* dynamic content.
*
* Template syntax:
* - `{{$memoryKey}}` - Renders the value of the specified memory key.
* - `{{functionName}}` - Calls the specified function and renders the result.
* - `{{functionName arg1 arg2 ...}}` - Calls the specified function with the provided list of arguments.
*
* Function arguments are optional and separated by spaces. They can be quoted using `'`, `"`, or `\`` delimiters.
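 *
 * @example
 * // Illustrative usage; `getTime` stands in for any function registered with the prompt's function registry
 * const section = new TemplateSection("Hello {{$name}}, the time is {{getTime}}", "user");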
*/
export class TemplateSection extends PromptSectionBase {
private _parts: PartRenderer[] = [];
public readonly template: string;
public readonly role: string;
/**
* Creates a new 'TemplateSection' instance.
* @param template Template to use for this section.
* @param role Message role to use for this section.
* @param tokens Optional. Sizing strategy for this section. Defaults to `auto`.
* @param required Optional. Indicates if this section is required. Defaults to `true`.
* @param separator Optional. Separator to use between sections when rendering as text. Defaults to `\n`.
* @param textPrefix Optional. Prefix to use for text output. Defaults to `undefined`.
*/
public constructor(template: string, role: string, tokens: number = -1, required: boolean = true, separator: string = '\n', textPrefix?: string) {
super(tokens, required, separator, textPrefix);
this.template = template;
this.role = role;
this.parseTemplate();
}
public async renderAsMessages(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<Message[]>> {
// Render parts in parallel
const renderedParts = await Promise.all(this._parts.map((part) => part(memory, functions, tokenizer, maxTokens)));
// Join all parts
const text = renderedParts.join('');
const length = tokenizer.encode(text).length;
// Return output
return this.returnMessages([{ role: this.role, content: text }], length, tokenizer, maxTokens);
}
private parseTemplate(): void {
// Parse template
let part = '';
let state = ParseState.inText;
let stringDelim = '';
for (let i = 0; i < this.template.length; i++) {
const char = this.template[i];
switch (state) {
case ParseState.inText:
if (char === '{' && this.template[i + 1] === '{') {
if (part.length > 0) {
this._parts.push(this.createTextRenderer(part));
part = '';
}
state = ParseState.inParameter;
i++;
} else {
part += char;
}
break;
case ParseState.inParameter:
if (char === '}' && this.template[i + 1] === '}') {
if (part.length > 0) {
if (part[0] === '$') {
this._parts.push(this.createVariableRenderer(part.substring(1)));
} else {
this._parts.push(this.createFunctionRenderer(part));
}
part = '';
}
state = ParseState.inText;
i++;
} else if (["'", '"', '`'].includes(char)) {
stringDelim = char;
state = ParseState.inString;
part += char;
} else {
part += char;
}
break;
case ParseState.inString:
part += char;
if (char === stringDelim) {
state = ParseState.inParameter;
}
break;
}
}
// Ensure we ended in the correct state
if (state !== ParseState.inText) {
throw new Error(`Invalid template: ${this.template}`);
}
// Add final part
if (part.length > 0) {
this._parts.push(this.createTextRenderer(part));
}
}
private createTextRenderer(text: string): PartRenderer {
return (memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<string> => {
return Promise.resolve(text);
};
}
private createVariableRenderer(name: string): PartRenderer {
return (memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<string> => {
            const value = memory.get(name);
            return Promise.resolve( | Utilities.toString(tokenizer, value));
};
}
private createFunctionRenderer(param: string): PartRenderer {
let name = '';
let args: string[] = [];
function savePart() {
if (part.length > 0) {
if (!name) {
name = part;
} else {
args.push(part);
}
part = '';
}
}
// Parse function name and args
let part = '';
let state = ParseState.inText;
let stringDelim = '';
for (let i = 0; i < param.length; i++) {
const char = param[i];
switch (state) {
case ParseState.inText:
if (["'", '"', '`'].includes(char)) {
savePart();
stringDelim = char;
state = ParseState.inString;
} else if (char == ' ') {
savePart();
} else {
part += char;
}
break;
case ParseState.inString:
if (char === stringDelim) {
savePart();
state = ParseState.inText;
} else {
part += char;
}
break;
}
}
// Add final part
savePart();
// Return renderer
return async (memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<string> => {
const value = await functions.invoke(name, memory, functions, tokenizer, args);
return Utilities.toString(tokenizer, value);
};
}
}
type PartRenderer = (memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number) => Promise<string>;
enum ParseState {
inText,
inParameter,
inString
} | src/TemplateSection.ts | Stevenic-promptrix-4a210d8 | [
{
"filename": "src/types.ts",
"retrieved_chunk": "}\nexport interface PromptFunctions {\n has(name: string): boolean;\n get(name: string): PromptFunction;\n invoke(name: string, memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, args: string[]): Promise<any>;\n}\nexport interface Tokenizer {\n decode(tokens: number[]): string;\n encode(text: string): number[];\n}",
"score": 56.40160685024237
},
{
"filename": "src/FunctionRegistry.ts",
"retrieved_chunk": " }\n this._functions.set(name, value);\n }\n public invoke(key: string, memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, args: string[]): Promise<any> {\n const fn = this.get(key);\n return fn(memory, functions, tokenizer, args);\n }\n}",
"score": 53.57421135997245
},
{
"filename": "src/FunctionResponseMessage.ts",
"retrieved_chunk": " this.name = name;\n this.response = response;\n }\n public async renderAsMessages(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<Message[]>> {\n // Calculate and cache response text and length\n if (this._length < 0) {\n this._text = Utilities.toString(tokenizer, this.response);\n this._length = tokenizer.encode(this.name).length + tokenizer.encode(this._text).length;\n }\n // Return output",
"score": 47.16427141157599
},
{
"filename": "src/TextSection.ts",
"retrieved_chunk": " this.text = text;\n this.role = role;\n }\n public async renderAsMessages(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<Message[]>> {\n // Calculate and cache length\n if (this._length < 0) {\n this._length = tokenizer.encode(this.text).length;\n }\n // Return output\n return this.returnMessages([{ role: this.role, content: this.text }], this._length, tokenizer, maxTokens);",
"score": 43.87926442117522
},
{
"filename": "src/PromptSectionBase.spec.ts",
"retrieved_chunk": "}\nexport class MultiTestSection extends PromptSectionBase {\n public async renderAsMessages(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<Message[]>> {\n return this.returnMessages([{ role: 'test', content: 'Hello Big' },{ role: 'test', content: 'World' }], 3, tokenizer, maxTokens);\n }\n}\ndescribe(\"PromptSectionBase\", () => {\n const memory = new VolatileMemory();\n const functions = new FunctionRegistry();\n const tokenizer = new GPT3Tokenizer();",
"score": 41.61214676750321
}
] | typescript | Utilities.toString(tokenizer, value));
import { Message, PromptFunctions, PromptMemory, PromptSection, RenderedPromptSection, Tokenizer } from "./types";
/**
* Abstract Base class for most prompt sections.
*/
export abstract class PromptSectionBase implements PromptSection {
public readonly required: boolean;
public readonly tokens: number;
public readonly separator: string;
public readonly textPrefix: string;
/**
* Creates a new 'PromptSectionBase' instance.
* @param tokens Optional. Sizing strategy for this section. Defaults to `auto`.
* @param required Optional. Indicates if this section is required. Defaults to `true`.
* @param separator Optional. Separator to use between sections when rendering as text. Defaults to `\n`.
     * @param textPrefix Optional. Prefix to use for text output. Defaults to an empty string.
*/
public constructor(tokens: number = -1, required: boolean = true, separator: string = '\n', textPrefix: string = '') {
this.required = required;
this.tokens = tokens;
this.separator = separator;
this.textPrefix = textPrefix;
}
public async renderAsText(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<string>> {
// Render as messages
const asMessages = await this.renderAsMessages(memory, functions, tokenizer, maxTokens);
// Convert to text
let text = asMessages.output.map((message) => PromptSectionBase.getMessageText(message)).join(this.separator);
// Calculate length
const prefixLength = tokenizer.encode(this.textPrefix).length;
const separatorLength = tokenizer.encode(this.separator).length;
let length = prefixLength + asMessages.length + ((asMessages.output.length - 1) * separatorLength);
// Truncate if fixed length
text = this.textPrefix + text;
if (this.tokens > 1.0 && length > this.tokens) {
const encoded = tokenizer.encode(text);
text = tokenizer.decode(encoded.slice(0, this.tokens));
length = this.tokens;
}
return { output: text, length: length, tooLong: length > maxTokens };
}
public abstract renderAsMessages(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<Message[]>>;
protected returnMessages(output: Message[], length: number, tokenizer: Tokenizer, maxTokens: number): RenderedPromptSection<Message[]> {
// Truncate if fixed length
if (this.tokens > 1.0) {
while (length > this.tokens) {
const msg = output.pop();
const encoded = tokenizer.encode(PromptSectionBase.getMessageText(msg!));
length -= encoded.length;
if (length < this.tokens) {
const delta = this.tokens - length;
const truncated = tokenizer.decode(encoded.slice(0, delta));
output.push({ role: msg | !.role, content: truncated }); |
length += delta;
}
}
}
return { output: output, length: length, tooLong: length > maxTokens };
}
public static getMessageText(message: Message): string {
let text = message.content ?? '';
if (message.function_call) {
text = JSON.stringify(message.function_call);
} else if (message.name) {
text = `${message.name} returned ${text}`;
}
return text;
}
} | src/PromptSectionBase.ts | Stevenic-promptrix-4a210d8 | [
{
"filename": "src/ConversationHistory.ts",
"retrieved_chunk": " const separatorLength = tokenizer.encode(this.separator).length;\n const lines: string[] = [];\n for (let i = history.length - 1; i >= 0; i--) {\n const msg = history[i];\n const message: Message = { role: msg.role, content: Utilities.toString(tokenizer, msg.content) };\n const prefix = message.role === 'user' ? this.userPrefix : this.assistantPrefix;\n const line = prefix + message.content;\n const length = tokenizer.encode(line).length + (lines.length > 0 ? separatorLength : 0);\n // Add initial line if required\n if (lines.length === 0 && this.required) {",
"score": 54.47299777794328
},
{
"filename": "src/ConversationHistory.ts",
"retrieved_chunk": " const messages: Message[] = [];\n for (let i = history.length - 1; i >= 0; i--) {\n // Clone message\n const msg = history[i];\n const message: Message = Object.assign({}, msg);\n if (msg.content !== null) {\n message.content = Utilities.toString(tokenizer, msg.content);\n }\n // Get message length\n const length = tokenizer.encode(PromptSectionBase.getMessageText(message)).length;",
"score": 52.97562330478857
},
{
"filename": "src/GPT3Tokenizer.spec.ts",
"retrieved_chunk": " describe(\"encode\", () => {\n it(\"should encode a string\", async () => {\n const tokenizer = new GPT3Tokenizer();\n encoded = await tokenizer.encode(\"Hello World\");\n assert.equal(encoded.length, 2);\n assert.equal(typeof encoded[0], \"number\");\n });\n });\n describe(\"decode\", () => {\n it(\"should decode an array of numbers\", async () => {",
"score": 40.9267061336789
},
{
"filename": "src/GPT3Tokenizer.spec.ts",
"retrieved_chunk": " const tokenizer = new GPT3Tokenizer();\n const decoded = await tokenizer.decode(encoded);\n assert.equal(decoded, \"Hello World\");\n });\n });\n});",
"score": 32.39242686388698
},
{
"filename": "src/ConversationHistory.ts",
"retrieved_chunk": " lines.unshift(line);\n }\n return { output: lines.join(this.separator), length: tokens, tooLong: tokens > maxTokens };\n }\n public async renderAsMessages(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<Message[]>> {\n // Get messages from memory\n const history: Message[] = memory.has(this.variable) ? (memory.get(this.variable) as Message[]).slice() : [];\n // Populate messages and stay under the token budget\n let tokens = 0;\n const budget = this.tokens > 1.0 ? Math.min(this.tokens, maxTokens) : maxTokens;",
"score": 31.89736710768996
}
] | typescript | !.role, content: truncated }); |
import { Message, PromptFunctions, PromptMemory, RenderedPromptSection, Tokenizer } from "./types";
import { PromptSectionBase } from "./PromptSectionBase";
import { Utilities } from "./Utilities";
/**
* A section that renders the conversation history.
*/
export class ConversationHistory extends PromptSectionBase {
public readonly variable: string;
public readonly userPrefix: string;
public readonly assistantPrefix: string;
/**
* Creates a new 'ConversationHistory' instance.
 * @param variable Name of memory variable used to store the history's `Message[]`.
* @param tokens Optional. Sizing strategy for this section. Defaults to `proportional` with a value of `1.0`.
* @param required Optional. Indicates if this section is required. Defaults to `false`.
* @param userPrefix Optional. Prefix to use for user messages when rendering as text. Defaults to `user: `.
* @param assistantPrefix Optional. Prefix to use for assistant messages when rendering as text. Defaults to `assistant: `.
*/
public constructor(variable: string, tokens: number = 1.0, required: boolean = false, userPrefix: string = 'user: ', assistantPrefix: string = 'assistant: ', separator: string = '\n') {
super(tokens, required, separator);
this.variable = variable;
this.userPrefix = userPrefix;
this.assistantPrefix = assistantPrefix;
}
public async renderAsText(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<string>> {
// Get messages from memory
const history: Message[] = memory.has(this.variable) ? (memory.get(this.variable) as Message[]).slice() : [];
// Populate history and stay under the token budget
let tokens = 0;
const budget = this.tokens > 1.0 ? Math.min(this.tokens, maxTokens) : maxTokens;
const separatorLength = tokenizer.encode(this.separator).length;
const lines: string[] = [];
for (let i = history.length - 1; i >= 0; i--) {
const msg = history[i];
const message: Message = { role: msg.role, content: Utilities.toString(tokenizer, msg.content) };
const prefix = message.role === 'user' ? this.userPrefix : this.assistantPrefix;
const line = prefix + message.content;
const length = tokenizer.encode(line).length + (lines.length > 0 ? separatorLength : 0);
// Add initial line if required
| if (lines.length === 0 && this.required) { |
tokens += length;
lines.unshift(line);
continue;
}
// Stop if we're over the token budget
if (tokens + length > budget) {
break;
}
// Add line
tokens += length;
lines.unshift(line);
}
return { output: lines.join(this.separator), length: tokens, tooLong: tokens > maxTokens };
}
public async renderAsMessages(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<Message[]>> {
// Get messages from memory
const history: Message[] = memory.has(this.variable) ? (memory.get(this.variable) as Message[]).slice() : [];
// Populate messages and stay under the token budget
let tokens = 0;
const budget = this.tokens > 1.0 ? Math.min(this.tokens, maxTokens) : maxTokens;
const messages: Message[] = [];
for (let i = history.length - 1; i >= 0; i--) {
// Clone message
const msg = history[i];
const message: Message = Object.assign({}, msg);
if (msg.content !== null) {
message.content = Utilities.toString(tokenizer, msg.content);
}
// Get message length
const length = tokenizer.encode(PromptSectionBase.getMessageText(message)).length;
// Add initial message if required
if (messages.length === 0 && this.required) {
tokens += length;
messages.unshift(message);
continue;
}
// Stop if we're over the token budget
if (tokens + length > budget) {
break;
}
// Add message
tokens += length;
messages.unshift(message);
}
return { output: messages, length: tokens, tooLong: tokens > maxTokens };
}
} | src/ConversationHistory.ts | Stevenic-promptrix-4a210d8 | [
{
"filename": "src/LayoutEngine.ts",
"retrieved_chunk": " return tokenizer.encode(output.join(this.separator)).length;\n } else {\n let length = 0;\n for (let i = 0; i < layout.length; i++) {\n const section = layout[i];\n if (section.layout) {\n length += section.layout.length;\n }\n }\n return length;",
"score": 62.720889495218046
},
{
"filename": "src/PromptSectionBase.ts",
"retrieved_chunk": " const truncated = tokenizer.decode(encoded.slice(0, delta));\n output.push({ role: msg!.role, content: truncated });\n length += delta;\n }\n }\n }\n return { output: output, length: length, tooLong: length > maxTokens };\n }\n public static getMessageText(message: Message): string {\n let text = message.content ?? '';",
"score": 59.65404477098286
},
{
"filename": "src/PromptSectionBase.ts",
"retrieved_chunk": " public abstract renderAsMessages(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<Message[]>>;\n protected returnMessages(output: Message[], length: number, tokenizer: Tokenizer, maxTokens: number): RenderedPromptSection<Message[]> {\n // Truncate if fixed length\n if (this.tokens > 1.0) {\n while (length > this.tokens) {\n const msg = output.pop();\n const encoded = tokenizer.encode(PromptSectionBase.getMessageText(msg!));\n length -= encoded.length;\n if (length < this.tokens) {\n const delta = this.tokens - length;",
"score": 57.67425460256854
},
{
"filename": "src/TemplateSection.ts",
"retrieved_chunk": " for (let i = 0; i < this.template.length; i++) {\n const char = this.template[i];\n switch (state) {\n case ParseState.inText:\n if (char === '{' && this.template[i + 1] === '{') {\n if (part.length > 0) {\n this._parts.push(this.createTextRenderer(part));\n part = '';\n }\n state = ParseState.inParameter;",
"score": 55.64440091130827
},
{
"filename": "src/LayoutEngine.ts",
"retrieved_chunk": " );\n // Build output\n const output: string[] = [];\n for (let i = 0; i < layout.length; i++) {\n const section = layout[i];\n if (section.layout) {\n output.push(section.layout.output);\n }\n }\n const text = output.join(this.separator);",
"score": 55.47763592394877
}
] | typescript | if (lines.length === 0 && this.required) { |
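For reference, a minimal usage sketch of the ConversationHistory section captured in the row above. It assumes the VolatileMemory, FunctionRegistry, and GPT3Tokenizer classes referenced in the retrieved chunks, that VolatileMemory exposes a set() method, and that the relative import paths are illustrative only; this is not part of the dataset row itself.
// Illustrative sketch, not part of the dataset row above.
import { ConversationHistory } from "./ConversationHistory";
import { VolatileMemory } from "./VolatileMemory";
import { FunctionRegistry } from "./FunctionRegistry";
import { GPT3Tokenizer } from "./GPT3Tokenizer";
async function renderHistoryExample(): Promise<void> {
    const memory = new VolatileMemory();
    // Store the conversation under the "history" variable the section reads from
    // (assumes VolatileMemory implements PromptMemory.set).
    memory.set("history", [
        { role: "user", content: "Hello" },
        { role: "assistant", content: "Hi! How can I help?" },
    ]);
    // Newest messages are considered first and kept while they fit the token budget.
    const section = new ConversationHistory("history", 100);
    const rendered = await section.renderAsText(memory, new FunctionRegistry(), new GPT3Tokenizer(), 100);
    console.log(rendered.output); // "user: Hello\nassistant: Hi! How can I help?"
}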
import { Message, PromptFunctions, PromptMemory, PromptSection, RenderedPromptSection, Tokenizer } from "./types";
/**
* Abstract Base class for most prompt sections.
*/
export abstract class PromptSectionBase implements PromptSection {
public readonly required: boolean;
public readonly tokens: number;
public readonly separator: string;
public readonly textPrefix: string;
/**
* Creates a new 'PromptSectionBase' instance.
* @param tokens Optional. Sizing strategy for this section. Defaults to `auto`.
* @param required Optional. Indicates if this section is required. Defaults to `true`.
* @param separator Optional. Separator to use between sections when rendering as text. Defaults to `\n`.
* @param textPrefix Optional. Prefix to use for text output. Defaults to `undefined`.
*/
public constructor(tokens: number = -1, required: boolean = true, separator: string = '\n', textPrefix: string = '') {
this.required = required;
this.tokens = tokens;
this.separator = separator;
this.textPrefix = textPrefix;
}
public async renderAsText(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<string>> {
// Render as messages
const asMessages = await this.renderAsMessages(memory, functions, tokenizer, maxTokens);
// Convert to text
let text = asMessages.output.map((message) => PromptSectionBase.getMessageText(message)).join(this.separator);
// Calculate length
const prefixLength = tokenizer.encode(this.textPrefix).length;
const separatorLength = tokenizer.encode(this.separator).length;
let length = prefixLength + asMessages.length + ((asMessages.output.length - 1) * separatorLength);
// Truncate if fixed length
text = this.textPrefix + text;
if (this.tokens > 1.0 && length > this.tokens) {
const encoded = tokenizer.encode(text);
text = tokenizer.decode(encoded.slice(0, this.tokens));
length = this.tokens;
}
return { output: text, length: length, tooLong: length > maxTokens };
}
public abstract renderAsMessages(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<Message[]>>;
protected returnMessages(output: Message[], length: number, tokenizer: Tokenizer, maxTokens: number): RenderedPromptSection<Message[]> {
// Truncate if fixed length
if (this.tokens > 1.0) {
while (length > this.tokens) {
const msg = output.pop();
const encoded = tokenizer.encode(PromptSectionBase.getMessageText(msg!));
length -= encoded.length;
if (length < this.tokens) {
const delta = this.tokens - length;
const truncated = tokenizer.decode(encoded.slice(0, delta));
output.push({ role: msg!.role, content: truncated });
length += delta;
}
}
}
return { output: output, length: length, tooLong: length > maxTokens };
}
public static getMessageText(message: Message): string {
let text = message.content ?? '';
if (message.function_call) {
text = JSON.stringify(message.function_call);
} else if | (message.name) { |
text = `${message.name} returned ${text}`;
}
return text;
}
} | src/PromptSectionBase.ts | Stevenic-promptrix-4a210d8 | [
{
"filename": "src/FunctionCallMessage.ts",
"retrieved_chunk": " this.function_call = function_call;\n }\n public async renderAsMessages(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<Message[]>> {\n // Calculate and cache response text and length\n if (this._length < 0) {\n this._length = tokenizer.encode(JSON.stringify(this.function_call)).length;\n }\n // Return output\n return this.returnMessages([{ role: 'assistant', content: null, function_call: this.function_call }], this._length, tokenizer, maxTokens);\n }",
"score": 36.88947868087338
},
{
"filename": "src/ConversationHistory.ts",
"retrieved_chunk": " const messages: Message[] = [];\n for (let i = history.length - 1; i >= 0; i--) {\n // Clone message\n const msg = history[i];\n const message: Message = Object.assign({}, msg);\n if (msg.content !== null) {\n message.content = Utilities.toString(tokenizer, msg.content);\n }\n // Get message length\n const length = tokenizer.encode(PromptSectionBase.getMessageText(message)).length;",
"score": 36.21219287365173
},
{
"filename": "src/ConversationHistory.ts",
"retrieved_chunk": " // Add message\n tokens += length;\n messages.unshift(message);\n }\n return { output: messages, length: tokens, tooLong: tokens > maxTokens };\n }\n}",
"score": 28.3429846108999
},
{
"filename": "src/ConversationHistory.ts",
"retrieved_chunk": " const separatorLength = tokenizer.encode(this.separator).length;\n const lines: string[] = [];\n for (let i = history.length - 1; i >= 0; i--) {\n const msg = history[i];\n const message: Message = { role: msg.role, content: Utilities.toString(tokenizer, msg.content) };\n const prefix = message.role === 'user' ? this.userPrefix : this.assistantPrefix;\n const line = prefix + message.content;\n const length = tokenizer.encode(line).length + (lines.length > 0 ? separatorLength : 0);\n // Add initial line if required\n if (lines.length === 0 && this.required) {",
"score": 25.803976987022978
},
{
"filename": "src/ConversationHistory.ts",
"retrieved_chunk": " // Add initial message if required\n if (messages.length === 0 && this.required) {\n tokens += length;\n messages.unshift(message);\n continue;\n }\n // Stop if we're over the token budget\n if (tokens + length > budget) {\n break;\n }",
"score": 23.67466572754214
}
] | typescript | (message.name) { |
import { log } from "./log";
import { AssembledProgram, Option } from "./types";
import { maskOfSize } from "./util";
/**
* Builds the output buffer from the matched instructions
* @param program The configured program we have built
* @param word16Align If true, align the 12 bit opcodes to 16 bit words. The lowest nibble will be 0
* @returns The output buffer that should be written to the assembled binary
*/
export const outputInstructions = (
program: AssembledProgram,
word16Align: boolean
): Option<Buffer> => {
// This buffer stores each nibble of the program separately, and we will combine this later into the output buffer
const threeNibbleBuffer: number[] = new Array(8192 * 3);
// Fill array with 0xF
for (let i = 0; i < threeNibbleBuffer.length; i++) {
threeNibbleBuffer[i] = 0xf;
}
for (const instruction of program.matchedInstructions) {
let opcode = 0;
switch (instruction.type) {
case "literal": {
opcode = buildOpcode(instruction.opcodeString, 0, 0);
break;
}
case "immediate": {
opcode = buildOpcode(
instruction.opcodeString,
instruction.bitCount,
instruction.immediate
);
break;
}
case "label": {
const label = program.matchedLabels[instruction.label];
if (!label) {
log(`Unknown label ${instruction.label}`, instruction.lineNumber);
return { type: "none" };
}
opcode = buildOpcode(
instruction.opcodeString,
instruction.bitCount,
label.address
);
break;
}
case "constant": {
if (instruction.subtype === "literal") {
opcode = instruction.value;
} else {
// Label
const label = program.matchedLabels[instruction.label];
if (!label) {
log(`Unknown label ${instruction.label}`, instruction.lineNumber);
return { type: "none" };
}
console.log(`${label.address.toString(16)}`);
opcode = label.address;
}
break;
}
}
const low = opcode & 0xf;
const mid = (opcode & 0xf0) >> 4;
const high = (opcode & 0xf00) >> 8;
const baseAddress = instruction.address * 3;
// We use reverse order because that's how the nibbles are in the ROM
threeNibbleBuffer[baseAddress] = high;
threeNibbleBuffer[baseAddress + 1] = mid;
threeNibbleBuffer[baseAddress + 2] = low;
}
return {
type: "some",
value: copyToOutputBuffer(threeNibbleBuffer, word16Align),
};
};
const copyToOutputBuffer = (
threeNibbleBuffer: number[],
word16Align: boolean
): Buffer => {
const bufferSize = word16Align ? 8192 * 2 : (8192 * 3) / 2;
const buffer = Buffer.alloc(bufferSize);
let byteBuffer = 0;
let bufferAddress = 0;
let lowNibble = false;
let evenByte = true;
for (let i = 0; i < threeNibbleBuffer.length; i++) {
const nibble = threeNibbleBuffer[i]!;
const writeSpacerValue = word16Align && !lowNibble && evenByte;
if (lowNibble || writeSpacerValue) {
// "Second", lower value of byte, or we're writing the spacer now
byteBuffer |= nibble;
buffer[bufferAddress] = byteBuffer;
bufferAddress += 1;
byteBuffer = 0;
evenByte = !evenByte;
} else {
// "First", upper value of byte
byteBuffer |= nibble << 4;
}
if (!writeSpacerValue) {
// We've moved to the next byte if we wrote a spacer, so stay at !lowNibble
lowNibble = !lowNibble;
}
}
return buffer;
};
/**
 * Consumes the opcode template from the BASS arch file and produces the actual output word
* @param template The opcode template from the BASS arch file
* @param argSize The number of bits in an argument to the opcode, if any
* @param argument The actual data to pass as an argument to the opcode, if any
* @returns The output opcode as a 12 bit word
*/
export const buildOpcode = (
template: string,
argSize: number,
argument: number
) => {
let index = 0;
let outputWord = 0;
while (index < template.length) {
const char = template[index];
if (char === "%") {
// Consume chars until whitespace
let data = 0;
let count = 0;
for (let i = 1; i < Math.min(13, template.length - index); i++) {
const nextChar = template[index + i]!;
if (nextChar !== "1" && nextChar !== "0") {
// Stop consuming
break;
}
data <<= 1;
data |= nextChar === "1" ? 1 : 0;
count += 1;
}
// Consume the next four chars as bits
outputWord <<= count;
outputWord |= data;
index += count + 1;
} else if (char === "=") {
if (template[index + 1] !== "a") {
console.log(
`ERROR: Unexpected char after = in instruction definition "${template}"`
);
return 0;
}
outputWord <<= argSize;
outputWord |= | maskOfSize(argSize) & argument; |
index += 2;
} else {
index += 1;
}
}
return outputWord;
};
| src/lib/opcodeOutput.ts | agg23-tamagotchi-disassembled-421eacb | [
{
"filename": "src/lib/bass.ts",
"retrieved_chunk": " });\n }\n};\nconst buildSortableOpcode = (template: string, bitCount: number) =>\n buildOpcode(template, bitCount, 0);\nconst cleanAndFinishInstructionRegex = (instruction: string): RegExp => {\n const cleaned = instruction\n .trim()\n .replace(whitespaceRegex, whitespaceRegex.source);\n // Force nothing but whitespace from beginning of string to instruction",
"score": 12.79872538196529
},
{
"filename": "src/lib/log.ts",
"retrieved_chunk": "/**\n * Logs an error message with a line number and message\n * @param message The error message to display\n * @param lineNumber The one-based index of the line that generated the error\n */\nexport const log = (message: String, lineNumber: Number) =>\n console.log(`ERROR (line ${lineNumber}): ${message}`);",
"score": 10.066896878305512
},
{
"filename": "src/lib/display.ts",
"retrieved_chunk": " const immediateSuffix = instructionString.substring(\n stringIndex + stringLength\n );\n let immediate = \"\";\n if (immediateLabel) {\n immediate = immediateLabel;\n } else {\n const argument = maskOfSize(bitCount) & actualWord;\n if (isLetterChar(immediatePrefix.charAt(immediatePrefix.length - 1))) {\n // If letter, treat as decimal",
"score": 9.786480247155751
},
{
"filename": "src/lib/bass.ts",
"retrieved_chunk": " return;\n }\n const sections = line.split(\";\");\n if (sections.length != 2) {\n log(\n \"Unexpected semicolon. Does this instruction have an output?\",\n lineNumber\n );\n return;\n }",
"score": 9.765040293706406
},
{
"filename": "src/extractIcons.ts",
"retrieved_chunk": " images.push({\n address: i - lbpxCount * 2,\n width: lbpxCount + 1,\n });\n lbpxCount = 0;\n } else {\n // If there's a gap in instructions, reset lbpxCount\n lbpxCount = 0;\n }\n }",
"score": 8.305264958551255
}
] | typescript | maskOfSize(argSize) & argument; |
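The doc comment in the row above describes the BASS opcode template format, where "%" introduces literal bits and "=a" splices in the instruction argument. Below is a small worked call of buildOpcode using a hypothetical template string rather than one taken from the real 6200.arch file; the import path is illustrative.
// Hypothetical template, shown only to trace the bit packing.
import { buildOpcode } from "./lib/opcodeOutput";
// "%1000" contributes the literal high nibble 0b1000; "=a" then appends the
// 4-bit argument, so the result is (0b1000 << 4) | (0xF & 0xB) === 0x8B.
const word = buildOpcode("%1000 =a", 4, 0xb);
console.log(word.toString(16)); // "8b"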
import { Message, PromptFunctions, PromptMemory, RenderedPromptSection, Tokenizer } from "./types";
import { PromptSectionBase } from "./PromptSectionBase";
import { Utilities } from "./Utilities";
/**
* A section that renders the conversation history.
*/
export class ConversationHistory extends PromptSectionBase {
public readonly variable: string;
public readonly userPrefix: string;
public readonly assistantPrefix: string;
/**
* Creates a new 'ConversationHistory' instance.
 * @param variable Name of memory variable used to store the history's `Message[]`.
* @param tokens Optional. Sizing strategy for this section. Defaults to `proportional` with a value of `1.0`.
* @param required Optional. Indicates if this section is required. Defaults to `false`.
* @param userPrefix Optional. Prefix to use for user messages when rendering as text. Defaults to `user: `.
* @param assistantPrefix Optional. Prefix to use for assistant messages when rendering as text. Defaults to `assistant: `.
*/
public constructor(variable: string, tokens: number = 1.0, required: boolean = false, userPrefix: string = 'user: ', assistantPrefix: string = 'assistant: ', separator: string = '\n') {
super(tokens, required, separator);
this.variable = variable;
this.userPrefix = userPrefix;
this.assistantPrefix = assistantPrefix;
}
public async renderAsText(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<string>> {
// Get messages from memory
const history: Message[] = memory.has(this.variable) ? (memory.get(this.variable) as Message[]).slice() : [];
// Populate history and stay under the token budget
let tokens = 0;
const budget = this.tokens > 1.0 ? Math.min(this.tokens, maxTokens) : maxTokens;
const separatorLength = tokenizer.encode(this.separator).length;
const lines: string[] = [];
for (let i = history.length - 1; i >= 0; i--) {
const msg = history[i];
const message: Message = { role | : msg.role, content: Utilities.toString(tokenizer, msg.content) }; |
const prefix = message.role === 'user' ? this.userPrefix : this.assistantPrefix;
const line = prefix + message.content;
const length = tokenizer.encode(line).length + (lines.length > 0 ? separatorLength : 0);
// Add initial line if required
if (lines.length === 0 && this.required) {
tokens += length;
lines.unshift(line);
continue;
}
// Stop if we're over the token budget
if (tokens + length > budget) {
break;
}
// Add line
tokens += length;
lines.unshift(line);
}
return { output: lines.join(this.separator), length: tokens, tooLong: tokens > maxTokens };
}
public async renderAsMessages(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<Message[]>> {
// Get messages from memory
const history: Message[] = memory.has(this.variable) ? (memory.get(this.variable) as Message[]).slice() : [];
// Populate messages and stay under the token budget
let tokens = 0;
const budget = this.tokens > 1.0 ? Math.min(this.tokens, maxTokens) : maxTokens;
const messages: Message[] = [];
for (let i = history.length - 1; i >= 0; i--) {
// Clone message
const msg = history[i];
const message: Message = Object.assign({}, msg);
if (msg.content !== null) {
message.content = Utilities.toString(tokenizer, msg.content);
}
// Get message length
const length = tokenizer.encode(PromptSectionBase.getMessageText(message)).length;
// Add initial message if required
if (messages.length === 0 && this.required) {
tokens += length;
messages.unshift(message);
continue;
}
// Stop if we're over the token budget
if (tokens + length > budget) {
break;
}
// Add message
tokens += length;
messages.unshift(message);
}
return { output: messages, length: tokens, tooLong: tokens > maxTokens };
}
} | src/ConversationHistory.ts | Stevenic-promptrix-4a210d8 | [
{
"filename": "src/PromptSectionBase.ts",
"retrieved_chunk": " public abstract renderAsMessages(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<Message[]>>;\n protected returnMessages(output: Message[], length: number, tokenizer: Tokenizer, maxTokens: number): RenderedPromptSection<Message[]> {\n // Truncate if fixed length\n if (this.tokens > 1.0) {\n while (length > this.tokens) {\n const msg = output.pop();\n const encoded = tokenizer.encode(PromptSectionBase.getMessageText(msg!));\n length -= encoded.length;\n if (length < this.tokens) {\n const delta = this.tokens - length;",
"score": 67.24666596141581
},
{
"filename": "src/PromptSectionBase.ts",
"retrieved_chunk": " const truncated = tokenizer.decode(encoded.slice(0, delta));\n output.push({ role: msg!.role, content: truncated });\n length += delta;\n }\n }\n }\n return { output: output, length: length, tooLong: length > maxTokens };\n }\n public static getMessageText(message: Message): string {\n let text = message.content ?? '';",
"score": 59.392389002369875
},
{
"filename": "src/LayoutEngine.ts",
"retrieved_chunk": " return tokenizer.encode(output.join(this.separator)).length;\n } else {\n let length = 0;\n for (let i = 0; i < layout.length; i++) {\n const section = layout[i];\n if (section.layout) {\n length += section.layout.length;\n }\n }\n return length;",
"score": 55.51025166515105
},
{
"filename": "src/LayoutEngine.ts",
"retrieved_chunk": " maxTokens,\n (section) => section.renderAsMessages(memory, functions, tokenizer, maxTokens),\n (section, remaining) => section.renderAsMessages(memory, functions, tokenizer, remaining)\n );\n // Build output\n const output: Message[] = [];\n for (let i = 0; i < layout.length; i++) {\n const section = layout[i];\n if (section.layout) {\n output.push(...section.layout.output);",
"score": 54.42216313410072
},
{
"filename": "src/PromptSectionBase.ts",
"retrieved_chunk": " let length = prefixLength + asMessages.length + ((asMessages.output.length - 1) * separatorLength);\n // Truncate if fixed length\n text = this.textPrefix + text;\n if (this.tokens > 1.0 && length > this.tokens) {\n const encoded = tokenizer.encode(text);\n text = tokenizer.decode(encoded.slice(0, this.tokens));\n length = this.tokens;\n }\n return { output: text, length: length, tooLong: length > maxTokens };\n }",
"score": 53.14244635528218
}
] | typescript | : msg.role, content: Utilities.toString(tokenizer, msg.content) }; |
import { Message, PromptFunctions, PromptMemory, RenderedPromptSection, Tokenizer } from "./types";
import { PromptSectionBase } from "./PromptSectionBase";
import { Utilities } from "./Utilities";
/**
* A template section that will be rendered as a message.
* @remarks
* This section type is used to render a template as a message. The template can contain
* parameters that will be replaced with values from memory or call functions to generate
* dynamic content.
*
* Template syntax:
* - `{{$memoryKey}}` - Renders the value of the specified memory key.
* - `{{functionName}}` - Calls the specified function and renders the result.
* - `{{functionName arg1 arg2 ...}}` - Calls the specified function with the provided list of arguments.
*
* Function arguments are optional and separated by spaces. They can be quoted using `'`, `"`, or `\`` delimiters.
*/
export class TemplateSection extends PromptSectionBase {
private _parts: PartRenderer[] = [];
public readonly template: string;
public readonly role: string;
/**
* Creates a new 'TemplateSection' instance.
* @param template Template to use for this section.
* @param role Message role to use for this section.
* @param tokens Optional. Sizing strategy for this section. Defaults to `auto`.
* @param required Optional. Indicates if this section is required. Defaults to `true`.
* @param separator Optional. Separator to use between sections when rendering as text. Defaults to `\n`.
* @param textPrefix Optional. Prefix to use for text output. Defaults to `undefined`.
*/
public constructor(template: string, role: string, tokens: number = -1, required: boolean = true, separator: string = '\n', textPrefix?: string) {
super(tokens, required, separator, textPrefix);
this.template = template;
this.role = role;
this.parseTemplate();
}
public async renderAsMessages(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<Message[]>> {
// Render parts in parallel
const renderedParts = await Promise.all(this._parts.map((part) => part(memory, functions, tokenizer, maxTokens)));
// Join all parts
const text = renderedParts.join('');
const length = tokenizer.encode(text).length;
// Return output
return this | .returnMessages([{ role: this.role, content: text }], length, tokenizer, maxTokens); |
}
private parseTemplate(): void {
// Parse template
let part = '';
let state = ParseState.inText;
let stringDelim = '';
for (let i = 0; i < this.template.length; i++) {
const char = this.template[i];
switch (state) {
case ParseState.inText:
if (char === '{' && this.template[i + 1] === '{') {
if (part.length > 0) {
this._parts.push(this.createTextRenderer(part));
part = '';
}
state = ParseState.inParameter;
i++;
} else {
part += char;
}
break;
case ParseState.inParameter:
if (char === '}' && this.template[i + 1] === '}') {
if (part.length > 0) {
if (part[0] === '$') {
this._parts.push(this.createVariableRenderer(part.substring(1)));
} else {
this._parts.push(this.createFunctionRenderer(part));
}
part = '';
}
state = ParseState.inText;
i++;
} else if (["'", '"', '`'].includes(char)) {
stringDelim = char;
state = ParseState.inString;
part += char;
} else {
part += char;
}
break;
case ParseState.inString:
part += char;
if (char === stringDelim) {
state = ParseState.inParameter;
}
break;
}
}
// Ensure we ended in the correct state
if (state !== ParseState.inText) {
throw new Error(`Invalid template: ${this.template}`);
}
// Add final part
if (part.length > 0) {
this._parts.push(this.createTextRenderer(part));
}
}
private createTextRenderer(text: string): PartRenderer {
return (memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<string> => {
return Promise.resolve(text);
};
}
private createVariableRenderer(name: string): PartRenderer {
return (memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<string> => {
const vaue = memory.get(name);
return Promise.resolve(Utilities.toString(tokenizer, vaue));
};
}
private createFunctionRenderer(param: string): PartRenderer {
let name = '';
let args: string[] = [];
function savePart() {
if (part.length > 0) {
if (!name) {
name = part;
} else {
args.push(part);
}
part = '';
}
}
// Parse function name and args
let part = '';
let state = ParseState.inText;
let stringDelim = '';
for (let i = 0; i < param.length; i++) {
const char = param[i];
switch (state) {
case ParseState.inText:
if (["'", '"', '`'].includes(char)) {
savePart();
stringDelim = char;
state = ParseState.inString;
} else if (char == ' ') {
savePart();
} else {
part += char;
}
break;
case ParseState.inString:
if (char === stringDelim) {
savePart();
state = ParseState.inText;
} else {
part += char;
}
break;
}
}
// Add final part
savePart();
// Return renderer
return async (memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<string> => {
const value = await functions.invoke(name, memory, functions, tokenizer, args);
return Utilities.toString(tokenizer, value);
};
}
}
type PartRenderer = (memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number) => Promise<string>;
enum ParseState {
inText,
inParameter,
inString
} | src/TemplateSection.ts | Stevenic-promptrix-4a210d8 | [
{
"filename": "src/TextSection.ts",
"retrieved_chunk": " this.text = text;\n this.role = role;\n }\n public async renderAsMessages(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<Message[]>> {\n // Calculate and cache length\n if (this._length < 0) {\n this._length = tokenizer.encode(this.text).length;\n }\n // Return output\n return this.returnMessages([{ role: this.role, content: this.text }], this._length, tokenizer, maxTokens);",
"score": 64.28399081081047
},
{
"filename": "src/PromptSectionBase.ts",
"retrieved_chunk": " this.textPrefix = textPrefix;\n }\n public async renderAsText(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<string>> {\n // Render as messages\n const asMessages = await this.renderAsMessages(memory, functions, tokenizer, maxTokens);\n // Convert to text\n let text = asMessages.output.map((message) => PromptSectionBase.getMessageText(message)).join(this.separator);\n // Calculate length\n const prefixLength = tokenizer.encode(this.textPrefix).length;\n const separatorLength = tokenizer.encode(this.separator).length;",
"score": 61.721211465951605
},
{
"filename": "src/GroupSection.ts",
"retrieved_chunk": " super(tokens, required, separator, textPrefix);\n this._layoutEngine = new LayoutEngine(sections, tokens, required, separator);\n this.sections = sections;\n this.role = role;\n }\n public async renderAsMessages(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<Message[]>> {\n // Render sections to text\n const { output, length, tooLong } = await this._layoutEngine.renderAsText(memory, functions, tokenizer, maxTokens);\n // Return output as a single message\n return this.returnMessages([{ role: this.role, content: output }], length, tokenizer, maxTokens);",
"score": 59.96099702294202
},
{
"filename": "src/FunctionCallMessage.ts",
"retrieved_chunk": " this.function_call = function_call;\n }\n public async renderAsMessages(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<Message[]>> {\n // Calculate and cache response text and length\n if (this._length < 0) {\n this._length = tokenizer.encode(JSON.stringify(this.function_call)).length;\n }\n // Return output\n return this.returnMessages([{ role: 'assistant', content: null, function_call: this.function_call }], this._length, tokenizer, maxTokens);\n }",
"score": 56.91469470912602
},
{
"filename": "src/LayoutEngine.ts",
"retrieved_chunk": " return { output: text, length: tokenizer.encode(text).length, tooLong: remaining < 0 };\n }\n public async renderAsMessages(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<Message[]>> {\n // Start a new layout\n // - Adds all sections from the current LayoutEngine hierarchy to a flat array\n const layout: PromptSectionLayout<Message[]>[] = [];\n this.addSectionsToLayout(this.sections, layout);\n // Layout sections\n const remaining = await this.layoutSections(\n layout,",
"score": 53.50353791802761
}
] | typescript | .returnMessages([{ role: this.role, content: text }], length, tokenizer, maxTokens); |
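The remarks block in the row above documents the TemplateSection syntax ({{$memoryKey}}, {{functionName}}, {{functionName arg1 ...}}). A short usage sketch follows; it assumes FunctionRegistry exposes an addFunction method as suggested by the retrieved chunks, and the import paths are illustrative only.
// Illustrative sketch, not part of the dataset row above.
import { TemplateSection } from "./TemplateSection";
import { VolatileMemory } from "./VolatileMemory";
import { FunctionRegistry } from "./FunctionRegistry";
import { GPT3Tokenizer } from "./GPT3Tokenizer";
async function renderTemplateExample(): Promise<void> {
    const memory = new VolatileMemory();
    memory.set("name", "Ada");
    const functions = new FunctionRegistry();
    // addFunction is assumed here; the registered function ignores its arguments.
    functions.addFunction("today", async () => new Date().toDateString());
    // "{{$name}}" reads from memory, "{{today}}" invokes the registered function.
    const section = new TemplateSection("Hello {{$name}}, today is {{today}}.", "system");
    const rendered = await section.renderAsMessages(memory, functions, new GPT3Tokenizer(), 1024);
    console.log(rendered.output[0].content); // e.g. "Hello Ada, today is Mon Jan 01 2024."
}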
import { Message, PromptFunctions, PromptMemory, RenderedPromptSection, Tokenizer } from "./types";
import { PromptSectionBase } from "./PromptSectionBase";
import { Utilities } from "./Utilities";
/**
* A template section that will be rendered as a message.
* @remarks
* This section type is used to render a template as a message. The template can contain
* parameters that will be replaced with values from memory or call functions to generate
* dynamic content.
*
* Template syntax:
* - `{{$memoryKey}}` - Renders the value of the specified memory key.
* - `{{functionName}}` - Calls the specified function and renders the result.
* - `{{functionName arg1 arg2 ...}}` - Calls the specified function with the provided list of arguments.
*
* Function arguments are optional and separated by spaces. They can be quoted using `'`, `"`, or `\`` delimiters.
*/
export class TemplateSection extends PromptSectionBase {
private _parts: PartRenderer[] = [];
public readonly template: string;
public readonly role: string;
/**
* Creates a new 'TemplateSection' instance.
* @param template Template to use for this section.
* @param role Message role to use for this section.
* @param tokens Optional. Sizing strategy for this section. Defaults to `auto`.
* @param required Optional. Indicates if this section is required. Defaults to `true`.
* @param separator Optional. Separator to use between sections when rendering as text. Defaults to `\n`.
* @param textPrefix Optional. Prefix to use for text output. Defaults to `undefined`.
*/
public constructor(template: string, role: string, tokens: number = -1, required: boolean = true, separator: string = '\n', textPrefix?: string) {
super(tokens, required, separator, textPrefix);
this.template = template;
this.role = role;
this.parseTemplate();
}
public async renderAsMessages(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<Message[]>> {
// Render parts in parallel
const renderedParts = await Promise.all(this._parts.map((part) => part(memory, functions, tokenizer, maxTokens)));
// Join all parts
const text = renderedParts.join('');
const length = tokenizer.encode(text).length;
// Return output
return this.returnMessages([{ role: this.role, content: text }], length, tokenizer, maxTokens);
}
private parseTemplate(): void {
// Parse template
let part = '';
let state = ParseState.inText;
let stringDelim = '';
for (let i = 0; i < this.template.length; i++) {
const char = this.template[i];
switch (state) {
case ParseState.inText:
if (char === '{' && this.template[i + 1] === '{') {
if (part.length > 0) {
this._parts.push(this.createTextRenderer(part));
part = '';
}
state = ParseState.inParameter;
i++;
} else {
part += char;
}
break;
case ParseState.inParameter:
if (char === '}' && this.template[i + 1] === '}') {
if (part.length > 0) {
if (part[0] === '$') {
this._parts.push(this.createVariableRenderer(part.substring(1)));
} else {
this._parts.push(this.createFunctionRenderer(part));
}
part = '';
}
state = ParseState.inText;
i++;
} else if (["'", '"', '`'].includes(char)) {
stringDelim = char;
state = ParseState.inString;
part += char;
} else {
part += char;
}
break;
case ParseState.inString:
part += char;
if (char === stringDelim) {
state = ParseState.inParameter;
}
break;
}
}
// Ensure we ended in the correct state
if (state !== ParseState.inText) {
throw new Error(`Invalid template: ${this.template}`);
}
// Add final part
if (part.length > 0) {
this._parts.push(this.createTextRenderer(part));
}
}
private createTextRenderer(text: string): PartRenderer {
return (memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<string> => {
return Promise.resolve(text);
};
}
private createVariableRenderer(name: string): PartRenderer {
return (memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<string> => {
const | vaue = memory.get(name); |
return Promise.resolve(Utilities.toString(tokenizer, vaue));
};
}
private createFunctionRenderer(param: string): PartRenderer {
let name = '';
let args: string[] = [];
function savePart() {
if (part.length > 0) {
if (!name) {
name = part;
} else {
args.push(part);
}
part = '';
}
}
// Parse function name and args
let part = '';
let state = ParseState.inText;
let stringDelim = '';
for (let i = 0; i < param.length; i++) {
const char = param[i];
switch (state) {
case ParseState.inText:
if (["'", '"', '`'].includes(char)) {
savePart();
stringDelim = char;
state = ParseState.inString;
} else if (char == ' ') {
savePart();
} else {
part += char;
}
break;
case ParseState.inString:
if (char === stringDelim) {
savePart();
state = ParseState.inText;
} else {
part += char;
}
break;
}
}
// Add final part
savePart();
// Return renderer
return async (memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<string> => {
const value = await functions.invoke(name, memory, functions, tokenizer, args);
return Utilities.toString(tokenizer, value);
};
}
}
type PartRenderer = (memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number) => Promise<string>;
enum ParseState {
inText,
inParameter,
inString
} | src/TemplateSection.ts | Stevenic-promptrix-4a210d8 | [
{
"filename": "src/types.ts",
"retrieved_chunk": "}\nexport interface PromptFunctions {\n has(name: string): boolean;\n get(name: string): PromptFunction;\n invoke(name: string, memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, args: string[]): Promise<any>;\n}\nexport interface Tokenizer {\n decode(tokens: number[]): string;\n encode(text: string): number[];\n}",
"score": 53.14529312286891
},
{
"filename": "src/FunctionRegistry.ts",
"retrieved_chunk": " }\n this._functions.set(name, value);\n }\n public invoke(key: string, memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, args: string[]): Promise<any> {\n const fn = this.get(key);\n return fn(memory, functions, tokenizer, args);\n }\n}",
"score": 47.90072881974463
},
{
"filename": "src/TextSection.ts",
"retrieved_chunk": " this.text = text;\n this.role = role;\n }\n public async renderAsMessages(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<Message[]>> {\n // Calculate and cache length\n if (this._length < 0) {\n this._length = tokenizer.encode(this.text).length;\n }\n // Return output\n return this.returnMessages([{ role: this.role, content: this.text }], this._length, tokenizer, maxTokens);",
"score": 39.004518492700704
},
{
"filename": "src/FunctionResponseMessage.ts",
"retrieved_chunk": " this.name = name;\n this.response = response;\n }\n public async renderAsMessages(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<Message[]>> {\n // Calculate and cache response text and length\n if (this._length < 0) {\n this._text = Utilities.toString(tokenizer, this.response);\n this._length = tokenizer.encode(this.name).length + tokenizer.encode(this._text).length;\n }\n // Return output",
"score": 38.50142886128911
},
{
"filename": "src/PromptSectionBase.spec.ts",
"retrieved_chunk": "}\nexport class MultiTestSection extends PromptSectionBase {\n public async renderAsMessages(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<Message[]>> {\n return this.returnMessages([{ role: 'test', content: 'Hello Big' },{ role: 'test', content: 'World' }], 3, tokenizer, maxTokens);\n }\n}\ndescribe(\"PromptSectionBase\", () => {\n const memory = new VolatileMemory();\n const functions = new FunctionRegistry();\n const tokenizer = new GPT3Tokenizer();",
"score": 36.654189200980134
}
] | typescript | vaue = memory.get(name); |
import { Message, PromptFunctions, PromptMemory, RenderedPromptSection, Tokenizer } from "./types";
import { PromptSectionBase } from "./PromptSectionBase";
import { Utilities } from "./Utilities";
/**
* A section that renders the conversation history.
*/
export class ConversationHistory extends PromptSectionBase {
public readonly variable: string;
public readonly userPrefix: string;
public readonly assistantPrefix: string;
/**
* Creates a new 'ConversationHistory' instance.
 * @param variable Name of memory variable used to store the history's `Message[]`.
* @param tokens Optional. Sizing strategy for this section. Defaults to `proportional` with a value of `1.0`.
* @param required Optional. Indicates if this section is required. Defaults to `false`.
* @param userPrefix Optional. Prefix to use for user messages when rendering as text. Defaults to `user: `.
* @param assistantPrefix Optional. Prefix to use for assistant messages when rendering as text. Defaults to `assistant: `.
*/
public constructor(variable: string, tokens: number = 1.0, required: boolean = false, userPrefix: string = 'user: ', assistantPrefix: string = 'assistant: ', separator: string = '\n') {
super(tokens, required, separator);
this.variable = variable;
this.userPrefix = userPrefix;
this.assistantPrefix = assistantPrefix;
}
public async renderAsText(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<string>> {
// Get messages from memory
const history: Message[] = memory.has(this.variable) ? (memory.get(this.variable) as Message[]).slice() : [];
// Populate history and stay under the token budget
let tokens = 0;
const budget = this.tokens > 1.0 ? Math.min(this.tokens, maxTokens) : maxTokens;
const separatorLength = tokenizer.encode(this.separator).length;
const lines: string[] = [];
for (let i = history.length - 1; i >= 0; i--) {
const msg = history[i];
const message: Message = { role: | msg.role, content: Utilities.toString(tokenizer, msg.content) }; |
const prefix = message.role === 'user' ? this.userPrefix : this.assistantPrefix;
const line = prefix + message.content;
const length = tokenizer.encode(line).length + (lines.length > 0 ? separatorLength : 0);
// Add initial line if required
if (lines.length === 0 && this.required) {
tokens += length;
lines.unshift(line);
continue;
}
// Stop if we're over the token budget
if (tokens + length > budget) {
break;
}
// Add line
tokens += length;
lines.unshift(line);
}
return { output: lines.join(this.separator), length: tokens, tooLong: tokens > maxTokens };
}
public async renderAsMessages(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<Message[]>> {
// Get messages from memory
const history: Message[] = memory.has(this.variable) ? (memory.get(this.variable) as Message[]).slice() : [];
// Populate messages and stay under the token budget
let tokens = 0;
const budget = this.tokens > 1.0 ? Math.min(this.tokens, maxTokens) : maxTokens;
const messages: Message[] = [];
for (let i = history.length - 1; i >= 0; i--) {
// Clone message
const msg = history[i];
const message: Message = Object.assign({}, msg);
if (msg.content !== null) {
message.content = Utilities.toString(tokenizer, msg.content);
}
// Get message length
const length = tokenizer.encode(PromptSectionBase.getMessageText(message)).length;
// Add initial message if required
if (messages.length === 0 && this.required) {
tokens += length;
messages.unshift(message);
continue;
}
// Stop if we're over the token budget
if (tokens + length > budget) {
break;
}
// Add message
tokens += length;
messages.unshift(message);
}
return { output: messages, length: tokens, tooLong: tokens > maxTokens };
}
} | src/ConversationHistory.ts | Stevenic-promptrix-4a210d8 | [
{
"filename": "src/PromptSectionBase.ts",
"retrieved_chunk": " public abstract renderAsMessages(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<Message[]>>;\n protected returnMessages(output: Message[], length: number, tokenizer: Tokenizer, maxTokens: number): RenderedPromptSection<Message[]> {\n // Truncate if fixed length\n if (this.tokens > 1.0) {\n while (length > this.tokens) {\n const msg = output.pop();\n const encoded = tokenizer.encode(PromptSectionBase.getMessageText(msg!));\n length -= encoded.length;\n if (length < this.tokens) {\n const delta = this.tokens - length;",
"score": 67.24666596141581
},
{
"filename": "src/PromptSectionBase.ts",
"retrieved_chunk": " const truncated = tokenizer.decode(encoded.slice(0, delta));\n output.push({ role: msg!.role, content: truncated });\n length += delta;\n }\n }\n }\n return { output: output, length: length, tooLong: length > maxTokens };\n }\n public static getMessageText(message: Message): string {\n let text = message.content ?? '';",
"score": 59.392389002369875
},
{
"filename": "src/LayoutEngine.ts",
"retrieved_chunk": " return tokenizer.encode(output.join(this.separator)).length;\n } else {\n let length = 0;\n for (let i = 0; i < layout.length; i++) {\n const section = layout[i];\n if (section.layout) {\n length += section.layout.length;\n }\n }\n return length;",
"score": 55.51025166515105
},
{
"filename": "src/LayoutEngine.ts",
"retrieved_chunk": " maxTokens,\n (section) => section.renderAsMessages(memory, functions, tokenizer, maxTokens),\n (section, remaining) => section.renderAsMessages(memory, functions, tokenizer, remaining)\n );\n // Build output\n const output: Message[] = [];\n for (let i = 0; i < layout.length; i++) {\n const section = layout[i];\n if (section.layout) {\n output.push(...section.layout.output);",
"score": 54.42216313410072
},
{
"filename": "src/PromptSectionBase.ts",
"retrieved_chunk": " let length = prefixLength + asMessages.length + ((asMessages.output.length - 1) * separatorLength);\n // Truncate if fixed length\n text = this.textPrefix + text;\n if (this.tokens > 1.0 && length > this.tokens) {\n const encoded = tokenizer.encode(text);\n text = tokenizer.decode(encoded.slice(0, this.tokens));\n length = this.tokens;\n }\n return { output: text, length: length, tooLong: length > maxTokens };\n }",
"score": 53.14244635528218
}
] | typescript | msg.role, content: Utilities.toString(tokenizer, msg.content) }; |
import fs, { readFileSync, writeFileSync } from "fs";
import { argv } from "process";
import readline from "readline";
import events from "events";
import { InstructionSet, parseArchLine } from "./lib/bass";
import { parseNumber } from "./lib/util";
import * as path from "path";
import { AssembledProgram } from "./lib/types";
import { commentRegex, labelRegex } from "./lib/regex";
import { outputInstructions } from "./lib/opcodeOutput";
import { log } from "./lib/log";
import { readArch, readByLines } from "./lib/fs";
interface ComamndEntry {
regex: RegExp;
action: (
line: { line: string; lineNumber: number },
matches: RegExpExecArray,
program: AssembledProgram
) => void;
}
// The commands supported by the assembler (separate from opcodes)
const commands: ComamndEntry[] = [
{
regex: /origin\s+((?:0x)?[a-f0-9]+)/,
action: ({ lineNumber }, [_2, address], program) => {
if (address === undefined) {
log("Could not parse origin", lineNumber);
return;
}
program.currentAddress = parseNumber(address);
},
},
{
regex: /constant\s+(?:(0x[a-f0-9]+|[0-9]+)|([a-z0-9_]+))/,
action: ({ line, lineNumber }, [_, constant, label], program) => {
const address = program.currentAddress;
if (constant !== undefined) {
const value = parseNumber(constant);
if (value > 4095) {
log(
`Constant ${constant} is too large to fit into 12 bits`,
lineNumber
);
return;
}
program.matchedInstructions.push({
type: "constant",
subtype: "literal",
value,
line,
lineNumber,
address,
});
} else if (label !== undefined) {
program.matchedInstructions.push({
type: "constant",
subtype: "label",
label,
line,
lineNumber,
address,
});
} else {
log("Unknown constant error", lineNumber);
return;
}
program.currentAddress += 1;
},
},
];
const parseAsmLine = (
line: string,
lineNumber: number,
instructionSet: InstructionSet,
program: AssembledProgram
) => {
if (line.length == 0 || line.startsWith("//") || line.startsWith(";")) {
// Comment. Skip
return;
}
for (const command of commands) {
const matches = command.regex.exec(line);
if (!!matches && matches.length > 0) {
command.action({ lineNumber, line }, matches, program);
return;
}
}
let hasInstruction = false;
// Match line against all known instructions from the BASS arch
for (const instruction of instructionSet.instructions) {
const matches = instruction.regex.exec(line);
const address = program.currentAddress;
if (!!matches && matches.length > 0) {
if (matches[1] !== undefined) {
// immediate
if (instruction.type !== "immediate") {
log(
"Attempted to match content with non-immediate instruction",
lineNumber
);
return;
}
program.matchedInstructions.push({
type: "immediate",
line,
immediate: parseNumber(matches[1]),
opcodeString: instruction.opcodeString,
bitCount: instruction.immediate.bitCount,
lineNumber,
address,
});
} else if (matches[2] !== undefined) {
// potential label
if (instruction.type !== "immediate") {
log(
"Attempted to match content with non-immediate instruction",
lineNumber
);
return;
}
program.matchedInstructions.push({
type: "label",
line,
label: matches[2],
opcodeString: instruction.opcodeString,
bitCount: instruction.immediate.bitCount,
lineNumber,
address,
});
} else {
// literal only
program.matchedInstructions.push({
type: "literal",
line,
opcodeString: instruction.opcodeString,
lineNumber,
address,
});
}
hasInstruction = true;
program.currentAddress += 1;
break;
}
}
| if (hasInstruction && program.unmatchedLabels.length > 0) { |
// Add queued labels
for (const label of program.unmatchedLabels) {
const existingLabel = program.matchedLabels[label.label];
if (existingLabel) {
log(
`Label "${label.label}" already exists. Was created on line ${existingLabel.lineNumber}`,
lineNumber
);
return;
}
program.matchedLabels[label.label] = {
lineNumber,
instructionIndex: program.matchedInstructions.length - 1,
address: program.currentAddress - 1,
};
}
// We've processed all labels
program.unmatchedLabels = [];
}
let lineWithoutLabel = line;
const matches = labelRegex.exec(line);
if (!!matches && matches.length > 0 && matches[1]) {
lineWithoutLabel =
lineWithoutLabel.substring(0, matches.index) +
lineWithoutLabel.substring(matches.index + matches[0].length);
const label = matches[1];
const existingLabel = program.matchedLabels[label];
if (existingLabel) {
log(
`Label "${label}" already exists. Was created on line ${existingLabel.lineNumber}`,
lineNumber
);
return;
}
if (hasInstruction) {
// Instruction on this line, pair them up
program.matchedLabels[label] = {
lineNumber,
instructionIndex: program.matchedInstructions.length - 1,
address: program.currentAddress - 1,
};
} else {
// Will pair with some future instruction. Queue it
program.unmatchedLabels.push({
label,
lineNumber,
});
}
}
lineWithoutLabel = lineWithoutLabel.replace(commentRegex, "").trim();
if (!hasInstruction && lineWithoutLabel.length > 0) {
log(`Unknown instruction "${lineWithoutLabel}"`, lineNumber);
}
};
if (argv.length != 4 && argv.length != 5) {
console.log(`Received ${argv.length - 2} arguments. Expected 2-3\n`);
console.log(
"Usage: node assembler.js [input.asm] [output.bin] {true|false: 12 bit output}"
);
process.exit(1);
}
const archPath = path.join(__dirname, "../bass/6200.arch");
const inputFile = argv[2] as string;
const outputFile = argv[3] as string;
const word16Align = argv[4] !== "true";
const build = async () => {
const program: AssembledProgram = {
currentAddress: 0,
matchedInstructions: [],
matchedLabels: {},
unmatchedLabels: [],
};
const instructionSet = await readArch(archPath);
await readByLines(inputFile, (line, lineNumber) =>
parseAsmLine(line, lineNumber, instructionSet, program)
);
const outputBuffer = outputInstructions(program, word16Align);
if (outputBuffer.type === "some") {
writeFileSync(outputFile, outputBuffer.value);
} else {
console.log("Could not generate output binary");
}
};
build();
| src/assembler.ts | agg23-tamagotchi-disassembled-421eacb | [
{
"filename": "src/lib/opcodeOutput.ts",
"retrieved_chunk": " instruction.immediate\n );\n break;\n }\n case \"label\": {\n const label = program.matchedLabels[instruction.label];\n if (!label) {\n log(`Unknown label ${instruction.label}`, instruction.lineNumber);\n return { type: \"none\" };\n }",
"score": 11.335093963276176
},
{
"filename": "src/lib/opcodeOutput.ts",
"retrieved_chunk": " } else {\n // Label\n const label = program.matchedLabels[instruction.label];\n if (!label) {\n log(`Unknown label ${instruction.label}`, instruction.lineNumber);\n return { type: \"none\" };\n }\n console.log(`${label.address.toString(16)}`);\n opcode = label.address;\n }",
"score": 9.747223818310438
},
{
"filename": "src/lib/types.ts",
"retrieved_chunk": " >;\n matchedLabels: {\n [name: string]: {\n lineNumber: number;\n instructionIndex: number;\n address: number;\n };\n };\n unmatchedLabels: Array<{\n label: string;",
"score": 9.176922498874115
},
{
"filename": "src/lib/opcodeOutput.ts",
"retrieved_chunk": " program: AssembledProgram,\n word16Align: boolean\n): Option<Buffer> => {\n // This buffer stores each nibble of the program separately, and we will combine this later into the output buffer\n const threeNibbleBuffer: number[] = new Array(8192 * 3);\n // Fill array with 0xF\n for (let i = 0; i < threeNibbleBuffer.length; i++) {\n threeNibbleBuffer[i] = 0xf;\n }\n for (const instruction of program.matchedInstructions) {",
"score": 8.964641256279505
},
{
"filename": "src/lib/opcodeOutput.ts",
"retrieved_chunk": "import { log } from \"./log\";\nimport { AssembledProgram, Option } from \"./types\";\nimport { maskOfSize } from \"./util\";\n/**\n * Builds the output buffer from the matched instructions\n * @param program The configured program we have built\n * @param word16Align If true, align the 12 bit opcodes to 16 bit words. The lowest nibble will be 0\n * @returns The output buffer that should be written to the assembled binary\n */\nexport const outputInstructions = (",
"score": 8.463722665800498
}
] | typescript | if (hasInstruction && program.unmatchedLabels.length > 0) { |
import { Message, PromptFunctions, PromptMemory, RenderedPromptSection, Tokenizer } from "./types";
import { PromptSectionBase } from "./PromptSectionBase";
import { Utilities } from "./Utilities";
/**
* A template section that will be rendered as a message.
* @remarks
* This section type is used to render a template as a message. The template can contain
* parameters that will be replaced with values from memory or call functions to generate
* dynamic content.
*
* Template syntax:
* - `{{$memoryKey}}` - Renders the value of the specified memory key.
* - `{{functionName}}` - Calls the specified function and renders the result.
* - `{{functionName arg1 arg2 ...}}` - Calls the specified function with the provided list of arguments.
*
* Function arguments are optional and separated by spaces. They can be quoted using `'`, `"`, or `\`` delimiters.
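 *
 * @example
 * An illustrative sketch (the `name` memory key and `makeGreeting` function are hypothetical):
 * ```typescript
 * const section = new TemplateSection("Hello {{$name}}! {{makeGreeting 'formal'}}", "user");
 * ```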
*/
export class TemplateSection extends PromptSectionBase {
private _parts: PartRenderer[] = [];
public readonly template: string;
public readonly role: string;
/**
* Creates a new 'TemplateSection' instance.
* @param template Template to use for this section.
* @param role Message role to use for this section.
* @param tokens Optional. Sizing strategy for this section. Defaults to `auto`.
* @param required Optional. Indicates if this section is required. Defaults to `true`.
* @param separator Optional. Separator to use between sections when rendering as text. Defaults to `\n`.
* @param textPrefix Optional. Prefix to use for text output. Defaults to `undefined`.
*/
public constructor(template: string, role: string, tokens: number = -1, required: boolean = true, separator: string = '\n', textPrefix?: string) {
super(tokens, required, separator, textPrefix);
this.template = template;
this.role = role;
this.parseTemplate();
}
public async renderAsMessages(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<Message[]>> {
// Render parts in parallel
const renderedParts = await Promise.all(this._parts.map((part) => part(memory, functions, tokenizer, maxTokens)));
// Join all parts
const text = renderedParts.join('');
const length = tokenizer.encode(text).length;
// Return output
| return this.returnMessages([{ role: this.role, content: text }], length, tokenizer, maxTokens); |
}
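    // Walks the template once, splitting it into literal-text, {{$variable}}, and {{function ...}} renderers.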
private parseTemplate(): void {
// Parse template
let part = '';
let state = ParseState.inText;
let stringDelim = '';
for (let i = 0; i < this.template.length; i++) {
const char = this.template[i];
switch (state) {
case ParseState.inText:
if (char === '{' && this.template[i + 1] === '{') {
if (part.length > 0) {
this._parts.push(this.createTextRenderer(part));
part = '';
}
state = ParseState.inParameter;
i++;
} else {
part += char;
}
break;
case ParseState.inParameter:
if (char === '}' && this.template[i + 1] === '}') {
if (part.length > 0) {
if (part[0] === '$') {
this._parts.push(this.createVariableRenderer(part.substring(1)));
} else {
this._parts.push(this.createFunctionRenderer(part));
}
part = '';
}
state = ParseState.inText;
i++;
} else if (["'", '"', '`'].includes(char)) {
stringDelim = char;
state = ParseState.inString;
part += char;
} else {
part += char;
}
break;
case ParseState.inString:
part += char;
if (char === stringDelim) {
state = ParseState.inParameter;
}
break;
}
}
// Ensure we ended in the correct state
if (state !== ParseState.inText) {
throw new Error(`Invalid template: ${this.template}`);
}
// Add final part
if (part.length > 0) {
this._parts.push(this.createTextRenderer(part));
}
}
private createTextRenderer(text: string): PartRenderer {
return (memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<string> => {
return Promise.resolve(text);
};
}
private createVariableRenderer(name: string): PartRenderer {
return (memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<string> => {
            const value = memory.get(name);
            return Promise.resolve(Utilities.toString(tokenizer, value));
};
}
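    // Splits the parameter into a function name plus space-separated (optionally quoted) arguments,
    // and returns a renderer that invokes that function at render time.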
private createFunctionRenderer(param: string): PartRenderer {
let name = '';
let args: string[] = [];
function savePart() {
if (part.length > 0) {
if (!name) {
name = part;
} else {
args.push(part);
}
part = '';
}
}
// Parse function name and args
let part = '';
let state = ParseState.inText;
let stringDelim = '';
for (let i = 0; i < param.length; i++) {
const char = param[i];
switch (state) {
case ParseState.inText:
if (["'", '"', '`'].includes(char)) {
savePart();
stringDelim = char;
state = ParseState.inString;
} else if (char == ' ') {
savePart();
} else {
part += char;
}
break;
case ParseState.inString:
if (char === stringDelim) {
savePart();
state = ParseState.inText;
} else {
part += char;
}
break;
}
}
// Add final part
savePart();
// Return renderer
return async (memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<string> => {
const value = await functions.invoke(name, memory, functions, tokenizer, args);
return Utilities.toString(tokenizer, value);
};
}
}
type PartRenderer = (memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number) => Promise<string>;
enum ParseState {
inText,
inParameter,
inString
} | src/TemplateSection.ts | Stevenic-promptrix-4a210d8 | [
{
"filename": "src/TextSection.ts",
"retrieved_chunk": " this.text = text;\n this.role = role;\n }\n public async renderAsMessages(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<Message[]>> {\n // Calculate and cache length\n if (this._length < 0) {\n this._length = tokenizer.encode(this.text).length;\n }\n // Return output\n return this.returnMessages([{ role: this.role, content: this.text }], this._length, tokenizer, maxTokens);",
"score": 66.91403947640147
},
{
"filename": "src/PromptSectionBase.ts",
"retrieved_chunk": " this.textPrefix = textPrefix;\n }\n public async renderAsText(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<string>> {\n // Render as messages\n const asMessages = await this.renderAsMessages(memory, functions, tokenizer, maxTokens);\n // Convert to text\n let text = asMessages.output.map((message) => PromptSectionBase.getMessageText(message)).join(this.separator);\n // Calculate length\n const prefixLength = tokenizer.encode(this.textPrefix).length;\n const separatorLength = tokenizer.encode(this.separator).length;",
"score": 63.975685288005046
},
{
"filename": "src/GroupSection.ts",
"retrieved_chunk": " super(tokens, required, separator, textPrefix);\n this._layoutEngine = new LayoutEngine(sections, tokens, required, separator);\n this.sections = sections;\n this.role = role;\n }\n public async renderAsMessages(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<Message[]>> {\n // Render sections to text\n const { output, length, tooLong } = await this._layoutEngine.renderAsText(memory, functions, tokenizer, maxTokens);\n // Return output as a single message\n return this.returnMessages([{ role: this.role, content: output }], length, tokenizer, maxTokens);",
"score": 62.27914889165202
},
{
"filename": "src/FunctionCallMessage.ts",
"retrieved_chunk": " this.function_call = function_call;\n }\n public async renderAsMessages(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<Message[]>> {\n // Calculate and cache response text and length\n if (this._length < 0) {\n this._length = tokenizer.encode(JSON.stringify(this.function_call)).length;\n }\n // Return output\n return this.returnMessages([{ role: 'assistant', content: null, function_call: this.function_call }], this._length, tokenizer, maxTokens);\n }",
"score": 59.41151221957349
},
{
"filename": "src/LayoutEngine.ts",
"retrieved_chunk": " return { output: text, length: tokenizer.encode(text).length, tooLong: remaining < 0 };\n }\n public async renderAsMessages(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<Message[]>> {\n // Start a new layout\n // - Adds all sections from the current LayoutEngine hierarchy to a flat array\n const layout: PromptSectionLayout<Message[]>[] = [];\n this.addSectionsToLayout(this.sections, layout);\n // Layout sections\n const remaining = await this.layoutSections(\n layout,",
"score": 55.428390567495406
}
] | typescript | return this.returnMessages([{ role: this.role, content: text }], length, tokenizer, maxTokens); |
import { Instruction } from "./bass";
export interface MatchedInstructionBase {
line: string;
lineNumber: number;
address: number;
}
export type ConstantLiteralMatchedInstruction = MatchedInstructionBase & {
type: "constant";
subtype: "literal";
value: number;
};
export type ConstantLabelMatchedInstruction = MatchedInstructionBase & {
type: "constant";
subtype: "label";
label: string;
};
export type ImmediateMatchedInstruction = MatchedInstructionBase & {
type: "immediate";
immediate: number;
bitCount: number;
opcodeString: string;
};
export type LabelMatchedInstruction = MatchedInstructionBase & {
type: "label";
label: string;
bitCount: number;
opcodeString: string;
};
export type LiteralMatchedInstruction = MatchedInstructionBase & {
type: "literal";
opcodeString: string;
};
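/**
 * Mutable state accumulated while assembling a source file: the instructions matched so far,
 * labels already resolved to an address, and labels still waiting to be paired with an
 * instruction.
 */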
export interface AssembledProgram {
currentAddress: number;
matchedInstructions: Array<
| ConstantLiteralMatchedInstruction
| ConstantLabelMatchedInstruction
| ImmediateMatchedInstruction
| LabelMatchedInstruction
| LiteralMatchedInstruction
>;
matchedLabels: {
[name: string]: {
lineNumber: number;
instructionIndex: number;
address: number;
};
};
unmatchedLabels: Array<{
label: string;
lineNumber: number;
}>;
}
/// Disassembly ///
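/** A decoded word: the matched instruction definition, the raw word itself, and its ROM address. */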
export interface DisassembledInstruction {
| instruction: Instruction; |
actualWord: number;
address: number;
}
export interface Some<T> {
type: "some";
value: T;
}
export interface None {
type: "none";
}
export type Option<T> = Some<T> | None;
| src/lib/types.ts | agg23-tamagotchi-disassembled-421eacb | [
{
"filename": "src/lib/bass.ts",
"retrieved_chunk": " instructions: Array<Instruction>;\n}\nexport type Instruction = ImmediateInstruction | LiteralInstruction;\nexport interface InstructionBase {\n regex: RegExp;\n opcodeString: string;\n sortableOpcode: number;\n originalInstruction: string;\n}\nexport type ImmediateInstruction = InstructionBase & {",
"score": 16.154490532992686
},
{
"filename": "src/lib/disassembly.ts",
"retrieved_chunk": "import { ImmediateInstruction, Instruction } from \"./bass\";\nimport { buildDisassembledInstructionString } from \"./display\";\nimport { DisassembledInstruction } from \"./types\";\nimport { maskOfSize } from \"./util\";\nexport const parseBinaryBuffer = (\n buffer: Buffer,\n instructions: Instruction[]\n): string => {\n const disassembledInstructions: DisassembledInstruction[] = [];\n const unsetLabels: Array<DisassembledInstruction[] | undefined> = new Array(",
"score": 14.384314242599011
},
{
"filename": "src/assembler.ts",
"retrieved_chunk": " // Instruction on this line, pair them up\n program.matchedLabels[label] = {\n lineNumber,\n instructionIndex: program.matchedInstructions.length - 1,\n address: program.currentAddress - 1,\n };\n } else {\n // Will pair with some future instruction. Queue it\n program.unmatchedLabels.push({\n label,",
"score": 9.410851123099834
},
{
"filename": "src/lib/disassembly.ts",
"retrieved_chunk": " let labelCount = 0;\n const namedLabels: Array<\n | {\n name: string;\n instructions: DisassembledInstruction[];\n }\n | undefined\n > = unsetLabels.map((instructions) => {\n if (!!instructions) {\n return {",
"score": 8.43021901562426
},
{
"filename": "src/assembler.ts",
"retrieved_chunk": " program.currentAddress += 1;\n break;\n }\n }\n if (hasInstruction && program.unmatchedLabels.length > 0) {\n // Add queued labels\n for (const label of program.unmatchedLabels) {\n const existingLabel = program.matchedLabels[label.label];\n if (existingLabel) {\n log(",
"score": 7.520751129896283
}
] | typescript | instruction: Instruction; |
import { Message, PromptFunctions, PromptMemory, RenderedPromptSection, Tokenizer } from "./types";
import { PromptSectionBase } from "./PromptSectionBase";
import { Utilities } from "./Utilities";
/**
* A template section that will be rendered as a message.
* @remarks
* This section type is used to render a template as a message. The template can contain
* parameters that will be replaced with values from memory or call functions to generate
* dynamic content.
*
* Template syntax:
* - `{{$memoryKey}}` - Renders the value of the specified memory key.
* - `{{functionName}}` - Calls the specified function and renders the result.
* - `{{functionName arg1 arg2 ...}}` - Calls the specified function with the provided list of arguments.
*
* Function arguments are optional and separated by spaces. They can be quoted using `'`, `"`, or `\`` delimiters.
*/
export class TemplateSection extends PromptSectionBase {
private _parts: PartRenderer[] = [];
public readonly template: string;
public readonly role: string;
/**
* Creates a new 'TemplateSection' instance.
* @param template Template to use for this section.
* @param role Message role to use for this section.
* @param tokens Optional. Sizing strategy for this section. Defaults to `auto`.
* @param required Optional. Indicates if this section is required. Defaults to `true`.
* @param separator Optional. Separator to use between sections when rendering as text. Defaults to `\n`.
* @param textPrefix Optional. Prefix to use for text output. Defaults to `undefined`.
*/
public constructor(template: string, role: string, tokens: number = -1, required: boolean = true, separator: string = '\n', textPrefix?: string) {
super(tokens, required, separator, textPrefix);
this.template = template;
this.role = role;
this.parseTemplate();
}
public async renderAsMessages(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<Message[]>> {
// Render parts in parallel
const renderedParts = await Promise.all(this._parts.map((part) => part(memory, functions, tokenizer, maxTokens)));
// Join all parts
const text = renderedParts.join('');
const length = tokenizer.encode(text).length;
// Return output
return this.returnMessages([{ role: this.role, content: text }], length, tokenizer, maxTokens);
}
private parseTemplate(): void {
// Parse template
let part = '';
let state = ParseState.inText;
let stringDelim = '';
for (let i = 0; i < this.template.length; i++) {
const char = this.template[i];
switch (state) {
case ParseState.inText:
if (char === '{' && this.template[i + 1] === '{') {
if (part.length > 0) {
this._parts.push(this.createTextRenderer(part));
part = '';
}
state = ParseState.inParameter;
i++;
} else {
part += char;
}
break;
case ParseState.inParameter:
if (char === '}' && this.template[i + 1] === '}') {
if (part.length > 0) {
if (part[0] === '$') {
this._parts.push(this.createVariableRenderer(part.substring(1)));
} else {
this._parts.push(this.createFunctionRenderer(part));
}
part = '';
}
state = ParseState.inText;
i++;
} else if (["'", '"', '`'].includes(char)) {
stringDelim = char;
state = ParseState.inString;
part += char;
} else {
part += char;
}
break;
case ParseState.inString:
part += char;
if (char === stringDelim) {
state = ParseState.inParameter;
}
break;
}
}
// Ensure we ended in the correct state
if (state !== ParseState.inText) {
throw new Error(`Invalid template: ${this.template}`);
}
// Add final part
if (part.length > 0) {
this._parts.push(this.createTextRenderer(part));
}
}
private createTextRenderer(text: string): PartRenderer {
return (memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<string> => {
return Promise.resolve(text);
};
}
private createVariableRenderer(name: string): PartRenderer {
return (memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<string> => {
            const value = memory.get(name);
            return Promise.resolve(Utilities.toString(tokenizer, value));
};
}
private createFunctionRenderer(param: string): PartRenderer {
let name = '';
let args: string[] = [];
function savePart() {
if (part.length > 0) {
if (!name) {
name = part;
} else {
args.push(part);
}
part = '';
}
}
// Parse function name and args
let part = '';
let state = ParseState.inText;
let stringDelim = '';
for (let i = 0; i < param.length; i++) {
const char = param[i];
switch (state) {
case ParseState.inText:
if (["'", '"', '`'].includes(char)) {
savePart();
stringDelim = char;
state = ParseState.inString;
} else if (char == ' ') {
savePart();
} else {
part += char;
}
break;
case ParseState.inString:
if (char === stringDelim) {
savePart();
state = ParseState.inText;
} else {
part += char;
}
break;
}
}
// Add final part
savePart();
// Return renderer
return async (memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<string> => {
| const value = await functions.invoke(name, memory, functions, tokenizer, args); |
return Utilities.toString(tokenizer, value);
};
}
}
type PartRenderer = (memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number) => Promise<string>;
enum ParseState {
inText,
inParameter,
inString
} | src/TemplateSection.ts | Stevenic-promptrix-4a210d8 | [
{
"filename": "src/FunctionRegistry.ts",
"retrieved_chunk": " }\n this._functions.set(name, value);\n }\n public invoke(key: string, memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, args: string[]): Promise<any> {\n const fn = this.get(key);\n return fn(memory, functions, tokenizer, args);\n }\n}",
"score": 33.339805731641576
},
{
"filename": "src/types.ts",
"retrieved_chunk": "}\nexport interface PromptFunctions {\n has(name: string): boolean;\n get(name: string): PromptFunction;\n invoke(name: string, memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, args: string[]): Promise<any>;\n}\nexport interface Tokenizer {\n decode(tokens: number[]): string;\n encode(text: string): number[];\n}",
"score": 29.17630551924783
},
{
"filename": "src/FunctionResponseMessage.ts",
"retrieved_chunk": " this.name = name;\n this.response = response;\n }\n public async renderAsMessages(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<Message[]>> {\n // Calculate and cache response text and length\n if (this._length < 0) {\n this._text = Utilities.toString(tokenizer, this.response);\n this._length = tokenizer.encode(this.name).length + tokenizer.encode(this._text).length;\n }\n // Return output",
"score": 22.549556828339984
},
{
"filename": "src/TextSection.ts",
"retrieved_chunk": " this.text = text;\n this.role = role;\n }\n public async renderAsMessages(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<Message[]>> {\n // Calculate and cache length\n if (this._length < 0) {\n this._length = tokenizer.encode(this.text).length;\n }\n // Return output\n return this.returnMessages([{ role: this.role, content: this.text }], this._length, tokenizer, maxTokens);",
"score": 20.659036635237975
},
{
"filename": "src/GroupSection.ts",
"retrieved_chunk": " super(tokens, required, separator, textPrefix);\n this._layoutEngine = new LayoutEngine(sections, tokens, required, separator);\n this.sections = sections;\n this.role = role;\n }\n public async renderAsMessages(memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, maxTokens: number): Promise<RenderedPromptSection<Message[]>> {\n // Render sections to text\n const { output, length, tooLong } = await this._layoutEngine.renderAsText(memory, functions, tokenizer, maxTokens);\n // Return output as a single message\n return this.returnMessages([{ role: this.role, content: output }], length, tokenizer, maxTokens);",
"score": 20.6454514611339
}
] | typescript | const value = await functions.invoke(name, memory, functions, tokenizer, args); |
import { ImmediateInstruction, Instruction } from "./bass";
import { buildDisassembledInstructionString } from "./display";
import { DisassembledInstruction } from "./types";
import { maskOfSize } from "./util";
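/**
 * Disassembles a raw binary image into assembly text, inferring `label_N` names for the targets
 * of flow-control instructions (call/calz/jp) so jumps reference labels instead of raw addresses.
 */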
export const parseBinaryBuffer = (
buffer: Buffer,
instructions: Instruction[]
): string => {
const disassembledInstructions: DisassembledInstruction[] = [];
const unsetLabels: Array<DisassembledInstruction[] | undefined> = new Array(
8192
);
for (let i = 0; i < buffer.length; i += 2) {
const highByte = buffer[i]!;
const lowByte = buffer[i + 1]!;
const address = i / 2;
const correctedWord = (highByte << 8) | lowByte;
const instruction = findWordInstruction(correctedWord, instructions);
const disassembledInstruction: DisassembledInstruction = {
instruction,
actualWord: correctedWord,
address,
};
if (isFlowControlWithImmediate(instruction)) {
// Convert local address into global one
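      // The reconstructed PC combines the instruction's immediate (low bits) with an upper
      // 5-bit page taken from the current address, from a preceding PSET, or forced to zero for calz.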
const pcLowerByte =
correctedWord & maskOfSize(instruction.immediate.bitCount);
let pcUpperFive = (address >> 8) & 0x1f;
if (isCalz(instruction)) {
// calz is only zero page and prevents pset
pcUpperFive = 0;
} else {
const lastInstruction =
disassembledInstructions[disassembledInstructions.length - 1]!;
| if (isPset(lastInstruction.instruction)) { |
// PSET immediate determines our upper 5 bits
pcUpperFive = lastInstruction.actualWord & 0x1f;
}
}
const pc = (pcUpperFive << 8) | pcLowerByte;
const existingLabel = unsetLabels[pc];
if (existingLabel) {
existingLabel.push(disassembledInstruction);
} else {
unsetLabels[pc] = [disassembledInstruction];
}
}
disassembledInstructions.push(disassembledInstruction);
}
// Build label names
let labelCount = 0;
const namedLabels: Array<
| {
name: string;
instructions: DisassembledInstruction[];
}
| undefined
> = unsetLabels.map((instructions) => {
if (!!instructions) {
return {
name: `label_${labelCount++}`,
instructions,
};
}
return undefined;
});
  // Build a map from each referencing instruction's address to the label name that replaces its immediate
const labelUsageMap: Array<string | undefined> = new Array(8192);
for (const namedLabel of namedLabels) {
if (namedLabel) {
for (const instruction of namedLabel.instructions) {
labelUsageMap[instruction.address] = namedLabel.name;
}
}
}
let output = "";
let address = 0;
for (const instruction of disassembledInstructions) {
const immediateLabel = labelUsageMap[instruction.address];
const lineLabel = namedLabels[instruction.address];
if (lineLabel) {
output += `\n${lineLabel.name}:\n`;
}
output += ` ${buildDisassembledInstructionString(
instruction,
immediateLabel
)}\n`;
address += 1;
}
return output;
};
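/** Finds the instruction whose opcode pattern matches the given word; assumes the list is sorted by opcode. */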
const findWordInstruction = (word: number, instructions: Instruction[]) => {
// Naive because it doesn't really matter
let bestMatch = instructions[0]!;
for (let i = 0; i < instructions.length; i++) {
const instruction = instructions[i]!;
if (instruction.sortableOpcode <= word) {
bestMatch = instruction;
} else {
// We've passed the best solution, end
break;
}
}
return bestMatch;
};
const flowControlImmediateMnemonics = ((): Set<string> =>
new Set<string>(["call", "calz", "jp"]))();
const extractMnemonic = (instruction: Instruction): string =>
instruction.originalInstruction.split(/\s/)[0]!.trim();
const isFlowControlWithImmediate = (
instruction: Instruction
): instruction is ImmediateInstruction => {
const mnemonic = extractMnemonic(instruction);
return flowControlImmediateMnemonics.has(mnemonic);
};
const isPset = (instruction: Instruction): boolean => {
const mnemonic = extractMnemonic(instruction);
return mnemonic === "pset";
};
const isCalz = (instruction: Instruction) => {
const mnemonic = extractMnemonic(instruction);
return mnemonic === "calz";
};
| src/lib/disassembly.ts | agg23-tamagotchi-disassembled-421eacb | [
{
"filename": "src/lib/display.ts",
"retrieved_chunk": "import { DisassembledInstruction } from \"./types\";\nimport { isLetterChar, maskOfSize } from \"./util\";\nexport const buildDisassembledInstructionString = (\n { instruction, actualWord, address }: DisassembledInstruction,\n immediateLabel: string | undefined\n) => {\n let instructionString = instruction.originalInstruction;\n if (instruction.type === \"immediate\") {\n const { bitCount, stringIndex, stringLength } = instruction.immediate;\n const immediatePrefix = instructionString.substring(0, stringIndex);",
"score": 17.267750447375867
},
{
"filename": "src/lib/display.ts",
"retrieved_chunk": " const immediateSuffix = instructionString.substring(\n stringIndex + stringLength\n );\n let immediate = \"\";\n if (immediateLabel) {\n immediate = immediateLabel;\n } else {\n const argument = maskOfSize(bitCount) & actualWord;\n if (isLetterChar(immediatePrefix.charAt(immediatePrefix.length - 1))) {\n // If letter, treat as decimal",
"score": 15.132602814615908
},
{
"filename": "src/assembler.ts",
"retrieved_chunk": " const matches = instruction.regex.exec(line);\n const address = program.currentAddress;\n if (!!matches && matches.length > 0) {\n if (matches[1] !== undefined) {\n // immediate\n if (instruction.type !== \"immediate\") {\n log(\n \"Attempted to match content with non-immediate instruction\",\n lineNumber\n );",
"score": 14.305897080021518
},
{
"filename": "src/assembler.ts",
"retrieved_chunk": " return;\n }\n program.matchedInstructions.push({\n type: \"immediate\",\n line,\n immediate: parseNumber(matches[1]),\n opcodeString: instruction.opcodeString,\n bitCount: instruction.immediate.bitCount,\n lineNumber,\n address,",
"score": 12.553789280075076
},
{
"filename": "src/assembler.ts",
"retrieved_chunk": " program.matchedInstructions.push({\n type: \"label\",\n line,\n label: matches[2],\n opcodeString: instruction.opcodeString,\n bitCount: instruction.immediate.bitCount,\n lineNumber,\n address,\n });\n } else {",
"score": 12.42472683772329
}
] | typescript | if (isPset(lastInstruction.instruction)) { |
import { writeFileSync } from "fs";
import { argv } from "process";
import { InstructionSet } from "./lib/bass";
import { parseNumber } from "./lib/util";
import * as path from "path";
import { AssembledProgram } from "./lib/types";
import { commentRegex, labelRegex } from "./lib/regex";
import { outputInstructions } from "./lib/opcodeOutput";
import { log } from "./lib/log";
import { readArch, readByLines } from "./lib/fs";
interface CommandEntry {
regex: RegExp;
action: (
line: { line: string; lineNumber: number },
matches: RegExpExecArray,
program: AssembledProgram
) => void;
}
// The commands supported by the assembler (separate from opcodes)
const commands: CommandEntry[] = [
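  // "origin <address>": move the current assembly address to <address>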
{
regex: /origin\s+((?:0x)?[a-f0-9]+)/,
action: ({ lineNumber }, [_2, address], program) => {
if (address === undefined) {
log("Could not parse origin", lineNumber);
return;
}
program.currentAddress = parseNumber(address);
},
},
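  // "constant <value|label>": emit a raw 12-bit word (a literal or a label's address) at the current address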
{
regex: /constant\s+(?:(0x[a-f0-9]+|[0-9]+)|([a-z0-9_]+))/,
action: ({ line, lineNumber }, [_, constant, label], program) => {
const address = program.currentAddress;
if (constant !== undefined) {
const value = parseNumber(constant);
if (value > 4095) {
log(
`Constant ${constant} is too large to fit into 12 bits`,
lineNumber
);
return;
}
program.matchedInstructions.push({
type: "constant",
subtype: "literal",
value,
line,
lineNumber,
address,
});
} else if (label !== undefined) {
program.matchedInstructions.push({
type: "constant",
subtype: "label",
label,
line,
lineNumber,
address,
});
} else {
log("Unknown constant error", lineNumber);
return;
}
program.currentAddress += 1;
},
},
];
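/**
 * Parses a single line of assembly source: assembler commands are tried first, then every
 * instruction from the BASS architecture definition, and finally any label on the line is
 * recorded (or queued until an instruction arrives to pair it with).
 */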
const parseAsmLine = (
line: string,
lineNumber: number,
instructionSet: InstructionSet,
program: AssembledProgram
) => {
  if (line.length === 0 || line.startsWith("//") || line.startsWith(";")) {
// Comment. Skip
return;
}
for (const command of commands) {
const matches = command.regex.exec(line);
if (!!matches && matches.length > 0) {
command.action({ lineNumber, line }, matches, program);
return;
}
}
let hasInstruction = false;
// Match line against all known instructions from the BASS arch
for (const instruction of instructionSet.instructions) {
const matches = instruction.regex.exec(line);
const address = program.currentAddress;
if (!!matches && matches.length > 0) {
if (matches[1] !== undefined) {
// immediate
if (instruction.type !== "immediate") {
log(
"Attempted to match content with non-immediate instruction",
lineNumber
);
return;
}
program.matchedInstructions.push({
type: "immediate",
line,
immediate: parseNumber(matches[1]),
opcodeString: instruction.opcodeString,
bitCount: instruction.immediate.bitCount,
lineNumber,
address,
});
} else if (matches[2] !== undefined) {
// potential label
if (instruction.type !== "immediate") {
log(
"Attempted to match content with non-immediate instruction",
lineNumber
);
return;
}
program.matchedInstructions.push({
type: "label",
line,
label: matches[2],
opcodeString: instruction.opcodeString,
bitCount: instruction.immediate.bitCount,
lineNumber,
address,
});
} else {
// literal only
program.matchedInstructions.push({
type: "literal",
line,
opcodeString: instruction.opcodeString,
lineNumber,
address,
});
}
hasInstruction = true;
program.currentAddress += 1;
break;
}
}
if (hasInstruction && program.unmatchedLabels.length > 0) {
// Add queued labels
for (const label of program.unmatchedLabels) {
const existingLabel = program.matchedLabels[label.label];
if (existingLabel) {
log(
`Label "${label.label}" already exists. Was created on line ${existingLabel.lineNumber}`,
lineNumber
);
return;
}
program.matchedLabels[label.label] = {
lineNumber,
instructionIndex: program.matchedInstructions.length - 1,
address: program.currentAddress - 1,
};
}
// We've processed all labels
program.unmatchedLabels = [];
}
let lineWithoutLabel = line;
| const matches = labelRegex.exec(line); |
if (!!matches && matches.length > 0 && matches[1]) {
lineWithoutLabel =
lineWithoutLabel.substring(0, matches.index) +
lineWithoutLabel.substring(matches.index + matches[0].length);
const label = matches[1];
const existingLabel = program.matchedLabels[label];
if (existingLabel) {
log(
`Label "${label}" already exists. Was created on line ${existingLabel.lineNumber}`,
lineNumber
);
return;
}
if (hasInstruction) {
// Instruction on this line, pair them up
program.matchedLabels[label] = {
lineNumber,
instructionIndex: program.matchedInstructions.length - 1,
address: program.currentAddress - 1,
};
} else {
// Will pair with some future instruction. Queue it
program.unmatchedLabels.push({
label,
lineNumber,
});
}
}
lineWithoutLabel = lineWithoutLabel.replace(commentRegex, "").trim();
if (!hasInstruction && lineWithoutLabel.length > 0) {
log(`Unknown instruction "${lineWithoutLabel}"`, lineNumber);
}
};
if (argv.length !== 4 && argv.length !== 5) {
console.log(`Received ${argv.length - 2} arguments. Expected 2-3\n`);
console.log(
"Usage: node assembler.js [input.asm] [output.bin] {true|false: 12 bit output}"
);
process.exit(1);
}
const archPath = path.join(__dirname, "../bass/6200.arch");
const inputFile = argv[2] as string;
const outputFile = argv[3] as string;
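// Passing "true" as the optional third argument selects packed 12-bit output; otherwise each opcode is padded to a 16-bit word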
const word16Align = argv[4] !== "true";
const build = async () => {
const program: AssembledProgram = {
currentAddress: 0,
matchedInstructions: [],
matchedLabels: {},
unmatchedLabels: [],
};
const instructionSet = await readArch(archPath);
await readByLines(inputFile, (line, lineNumber) =>
parseAsmLine(line, lineNumber, instructionSet, program)
);
const outputBuffer = outputInstructions(program, word16Align);
if (outputBuffer.type === "some") {
writeFileSync(outputFile, outputBuffer.value);
} else {
console.log("Could not generate output binary");
}
};
build();
| src/assembler.ts | agg23-tamagotchi-disassembled-421eacb | [
{
"filename": "src/lib/opcodeOutput.ts",
"retrieved_chunk": " program: AssembledProgram,\n word16Align: boolean\n): Option<Buffer> => {\n // This buffer stores each nibble of the program separately, and we will combine this later into the output buffer\n const threeNibbleBuffer: number[] = new Array(8192 * 3);\n // Fill array with 0xF\n for (let i = 0; i < threeNibbleBuffer.length; i++) {\n threeNibbleBuffer[i] = 0xf;\n }\n for (const instruction of program.matchedInstructions) {",
"score": 16.846957557577085
},
{
"filename": "src/lib/types.ts",
"retrieved_chunk": " >;\n matchedLabels: {\n [name: string]: {\n lineNumber: number;\n instructionIndex: number;\n address: number;\n };\n };\n unmatchedLabels: Array<{\n label: string;",
"score": 14.43709769272575
},
{
"filename": "src/lib/opcodeOutput.ts",
"retrieved_chunk": " } else {\n // Label\n const label = program.matchedLabels[instruction.label];\n if (!label) {\n log(`Unknown label ${instruction.label}`, instruction.lineNumber);\n return { type: \"none\" };\n }\n console.log(`${label.address.toString(16)}`);\n opcode = label.address;\n }",
"score": 12.836119759750346
},
{
"filename": "src/lib/bass.ts",
"retrieved_chunk": " stringLength: number;\n };\n};\nexport type LiteralInstruction = InstructionBase & {\n type: \"literal\";\n};\n/**\n * Parses a single line of a BASS architecture file\n * @param line The line being parsed\n * @param lineNumber The one-based index of the line being processed",
"score": 12.607405564253707
},
{
"filename": "src/lib/opcodeOutput.ts",
"retrieved_chunk": " instruction.immediate\n );\n break;\n }\n case \"label\": {\n const label = program.matchedLabels[instruction.label];\n if (!label) {\n log(`Unknown label ${instruction.label}`, instruction.lineNumber);\n return { type: \"none\" };\n }",
"score": 12.071070508393419
}
] | typescript | const matches = labelRegex.exec(line); |
import { strict as assert } from "assert";
import { VolatileMemory } from "./VolatileMemory";
describe("VolatileMemory", () => {
describe("constructor", () => {
it("should create a VolatileMemory", () => {
const memory = new VolatileMemory();
assert.notEqual(memory, null);
});
it("should create a VolatileMemory with initial values", () => {
const memory = new VolatileMemory({
"test": 123
});
assert.notEqual(memory, null);
assert.equal(memory.has("test"), true);
});
});
const obj = { foo: 'bar' };
const memory = new VolatileMemory();
describe("set", () => {
it("should set a primitive value", () => {
memory.set("test", 123);
assert.equal(memory.has("test"), true);
});
it("should set an object", () => {
memory.set("test2", obj);
assert.equal(memory.has("test2"), true);
});
});
describe("get", () => {
it("should get a primitive value", () => {
const value = memory.get("test");
assert.equal(value, 123);
});
it("should get an object that's a clone", () => {
const value = memory.get("test2");
assert.deepEqual(value, { foo: 'bar' });
assert.notEqual(value, obj);
});
it("should return undefined when getting a value that doesn't exist", () => {
const value = memory.get("test3");
assert.equal(value, undefined);
});
});
describe("has", () => {
it("should return false when a value doesn't exist", () => {
assert.equal(memory.has("test3"), false);
});
it("should return true when a value exists", () => {
assert.equal(memory.has("test"), true);
});
});
describe("delete", () => {
it("should delete a value", () => {
memory.delete("test");
assert.equal(memory.has("test"), false);
assert.equal(memory.has("test2"), true);
});
});
describe("clear", () => {
it("should clear all values", () => {
memory.set("test", 123);
| memory.clear(); |
assert.equal(memory.has("test"), false);
assert.equal(memory.has("test2"), false);
});
});
});
| src/VolatileMemory.spec.ts | Stevenic-promptrix-4a210d8 | [
{
"filename": "src/VolatileMemory.ts",
"retrieved_chunk": " const clone = JSON.parse(JSON.stringify(value));\n this._memory.set(key, clone);\n } else {\n this._memory.set(key, value);\n }\n }\n public delete(key: string): void {\n this._memory.delete(key);\n }\n public clear(): void {",
"score": 37.48042830529519
},
{
"filename": "src/types.ts",
"retrieved_chunk": "export interface FunctionCall {\n name?: string;\n arguments?: string;\n}\nexport interface PromptMemory {\n has(key: string): boolean;\n get<TValue = any>(key: string): TValue;\n set<TValue = any>(key: string, value: TValue): void;\n delete(key: string): void;\n clear(): void;",
"score": 35.85620069759337
},
{
"filename": "src/FunctionRegistry.spec.ts",
"retrieved_chunk": " it(\"should throw when getting a function that doesn't exist\", () => {\n const registry = new FunctionRegistry();\n assert.throws(() => registry.get(\"test\"));\n });\n });\n describe(\"has\", () => {\n it(\"should return false when a function doesn't exist\", () => {\n const registry = new FunctionRegistry();\n assert.equal(registry.has(\"test\"), false);\n });",
"score": 26.16237801155618
},
{
"filename": "src/FunctionRegistry.spec.ts",
"retrieved_chunk": " });\n it(\"should create a FunctionRegistry with initial functions\", () => {\n const registry = new FunctionRegistry({\n \"test\": async (memory, functions, tokenizer, args) => { }\n });\n assert.notEqual(registry, null);\n assert.equal(registry.has(\"test\"), true);\n });\n });\n describe(\"addFunction\", () => {",
"score": 26.085079358789535
},
{
"filename": "src/FunctionRegistry.spec.ts",
"retrieved_chunk": " it(\"should return true when a function exists\", () => {\n const registry = new FunctionRegistry({\n \"test\": async (memory, functions, tokenizer, args) => { }\n });\n assert.equal(registry.has(\"test\"), true);\n });\n });\n describe(\"invoke\", () => {\n const memory = new VolatileMemory();\n const tokenizer = new GPT3Tokenizer();",
"score": 24.740396293753097
}
] | typescript | memory.clear(); |
import { writeFileSync } from "fs";
import { argv } from "process";
import { InstructionSet } from "./lib/bass";
import { parseNumber } from "./lib/util";
import * as path from "path";
import { AssembledProgram } from "./lib/types";
import { commentRegex, labelRegex } from "./lib/regex";
import { outputInstructions } from "./lib/opcodeOutput";
import { log } from "./lib/log";
import { readArch, readByLines } from "./lib/fs";
interface CommandEntry {
regex: RegExp;
action: (
line: { line: string; lineNumber: number },
matches: RegExpExecArray,
program: AssembledProgram
) => void;
}
// The commands supported by the assembler (separate from opcodes)
const commands: CommandEntry[] = [
{
regex: /origin\s+((?:0x)?[a-f0-9]+)/,
action: ({ lineNumber }, [_2, address], program) => {
if (address === undefined) {
log("Could not parse origin", lineNumber);
return;
}
program.currentAddress = parseNumber(address);
},
},
{
regex: /constant\s+(?:(0x[a-f0-9]+|[0-9]+)|([a-z0-9_]+))/,
action: ({ line, lineNumber }, [_, constant, label], program) => {
const address = program.currentAddress;
if (constant !== undefined) {
const value = parseNumber(constant);
if (value > 4095) {
log(
`Constant ${constant} is too large to fit into 12 bits`,
lineNumber
);
return;
}
program.matchedInstructions.push({
type: "constant",
subtype: "literal",
value,
line,
lineNumber,
address,
});
} else if (label !== undefined) {
program.matchedInstructions.push({
type: "constant",
subtype: "label",
label,
line,
lineNumber,
address,
});
} else {
log("Unknown constant error", lineNumber);
return;
}
program.currentAddress += 1;
},
},
];
const parseAsmLine = (
line: string,
lineNumber: number,
instructionSet: InstructionSet,
program: AssembledProgram
) => {
  if (line.length === 0 || line.startsWith("//") || line.startsWith(";")) {
// Comment. Skip
return;
}
for (const command of commands) {
const matches = command.regex.exec(line);
if (!!matches && matches.length > 0) {
command.action({ lineNumber, line }, matches, program);
return;
}
}
let hasInstruction = false;
// Match line against all known instructions from the BASS arch
for (const instruction of instructionSet.instructions) {
const matches = instruction.regex.exec(line);
const address = program.currentAddress;
if (!!matches && matches.length > 0) {
if (matches[1] !== undefined) {
// immediate
if (instruction.type !== "immediate") {
log(
"Attempted to match content with non-immediate instruction",
lineNumber
);
return;
}
program.matchedInstructions.push({
type: "immediate",
line,
immediate: parseNumber(matches[1]),
opcodeString: instruction.opcodeString,
bitCount: instruction.immediate.bitCount,
lineNumber,
address,
});
} else if (matches[2] !== undefined) {
// potential label
if (instruction.type !== "immediate") {
log(
"Attempted to match content with non-immediate instruction",
lineNumber
);
return;
}
program.matchedInstructions.push({
type: "label",
line,
label: matches[2],
opcodeString: instruction.opcodeString,
bitCount: instruction.immediate.bitCount,
lineNumber,
address,
});
} else {
// literal only
program.matchedInstructions.push({
type: "literal",
line,
opcodeString: instruction.opcodeString,
lineNumber,
address,
});
}
hasInstruction = true;
program.currentAddress += 1;
break;
}
}
if (hasInstruction && program.unmatchedLabels.length > 0) {
// Add queued labels
for (const label of program.unmatchedLabels) {
const existingLabel = program | .matchedLabels[label.label]; |
if (existingLabel) {
log(
`Label "${label.label}" already exists. Was created on line ${existingLabel.lineNumber}`,
lineNumber
);
return;
}
program.matchedLabels[label.label] = {
lineNumber,
instructionIndex: program.matchedInstructions.length - 1,
address: program.currentAddress - 1,
};
}
// We've processed all labels
program.unmatchedLabels = [];
}
let lineWithoutLabel = line;
const matches = labelRegex.exec(line);
if (!!matches && matches.length > 0 && matches[1]) {
lineWithoutLabel =
lineWithoutLabel.substring(0, matches.index) +
lineWithoutLabel.substring(matches.index + matches[0].length);
const label = matches[1];
const existingLabel = program.matchedLabels[label];
if (existingLabel) {
log(
`Label "${label}" already exists. Was created on line ${existingLabel.lineNumber}`,
lineNumber
);
return;
}
if (hasInstruction) {
// Instruction on this line, pair them up
program.matchedLabels[label] = {
lineNumber,
instructionIndex: program.matchedInstructions.length - 1,
address: program.currentAddress - 1,
};
} else {
// Will pair with some future instruction. Queue it
program.unmatchedLabels.push({
label,
lineNumber,
});
}
}
lineWithoutLabel = lineWithoutLabel.replace(commentRegex, "").trim();
if (!hasInstruction && lineWithoutLabel.length > 0) {
log(`Unknown instruction "${lineWithoutLabel}"`, lineNumber);
}
};
if (argv.length !== 4 && argv.length !== 5) {
console.log(`Received ${argv.length - 2} arguments. Expected 2-3\n`);
console.log(
"Usage: node assembler.js [input.asm] [output.bin] {true|false: 12 bit output}"
);
process.exit(1);
}
const archPath = path.join(__dirname, "../bass/6200.arch");
const inputFile = argv[2] as string;
const outputFile = argv[3] as string;
const word16Align = argv[4] !== "true";
const build = async () => {
const program: AssembledProgram = {
currentAddress: 0,
matchedInstructions: [],
matchedLabels: {},
unmatchedLabels: [],
};
const instructionSet = await readArch(archPath);
await readByLines(inputFile, (line, lineNumber) =>
parseAsmLine(line, lineNumber, instructionSet, program)
);
const outputBuffer = outputInstructions(program, word16Align);
if (outputBuffer.type === "some") {
writeFileSync(outputFile, outputBuffer.value);
} else {
console.log("Could not generate output binary");
}
};
build();
| src/assembler.ts | agg23-tamagotchi-disassembled-421eacb | [
{
"filename": "src/lib/opcodeOutput.ts",
"retrieved_chunk": " instruction.immediate\n );\n break;\n }\n case \"label\": {\n const label = program.matchedLabels[instruction.label];\n if (!label) {\n log(`Unknown label ${instruction.label}`, instruction.lineNumber);\n return { type: \"none\" };\n }",
"score": 34.756361158700834
},
{
"filename": "src/lib/opcodeOutput.ts",
"retrieved_chunk": " } else {\n // Label\n const label = program.matchedLabels[instruction.label];\n if (!label) {\n log(`Unknown label ${instruction.label}`, instruction.lineNumber);\n return { type: \"none\" };\n }\n console.log(`${label.address.toString(16)}`);\n opcode = label.address;\n }",
"score": 30.40491032404055
},
{
"filename": "src/lib/types.ts",
"retrieved_chunk": " >;\n matchedLabels: {\n [name: string]: {\n lineNumber: number;\n instructionIndex: number;\n address: number;\n };\n };\n unmatchedLabels: Array<{\n label: string;",
"score": 23.54051517872265
},
{
"filename": "src/lib/opcodeOutput.ts",
"retrieved_chunk": " program: AssembledProgram,\n word16Align: boolean\n): Option<Buffer> => {\n // This buffer stores each nibble of the program separately, and we will combine this later into the output buffer\n const threeNibbleBuffer: number[] = new Array(8192 * 3);\n // Fill array with 0xF\n for (let i = 0; i < threeNibbleBuffer.length; i++) {\n threeNibbleBuffer[i] = 0xf;\n }\n for (const instruction of program.matchedInstructions) {",
"score": 22.11198436108476
},
{
"filename": "src/lib/disassembly.ts",
"retrieved_chunk": " const existingLabel = unsetLabels[pc];\n if (existingLabel) {\n existingLabel.push(disassembledInstruction);\n } else {\n unsetLabels[pc] = [disassembledInstruction];\n }\n }\n disassembledInstructions.push(disassembledInstruction);\n }\n // Build label names",
"score": 17.83660028958265
}
] | typescript | .matchedLabels[label.label]; |
import { strict as assert } from "assert";
import { FunctionRegistry } from "./FunctionRegistry";
import { VolatileMemory } from "./VolatileMemory";
import { GPT3Tokenizer } from "./GPT3Tokenizer";
describe("FunctionRegistry", () => {
describe("constructor", () => {
it("should create a FunctionRegistry", () => {
const registry = new FunctionRegistry();
assert.notEqual(registry, null);
assert.equal(registry.has("test"), false);
});
it("should create a FunctionRegistry with initial functions", () => {
const registry = new FunctionRegistry({
"test": async (memory, functions, tokenizer, args) => { }
});
assert.notEqual(registry, null);
assert.equal(registry.has("test"), true);
});
});
describe("addFunction", () => {
it("should add a function", () => {
const registry = new FunctionRegistry();
registry.addFunction("test", async (memory, functions, tokenizer, args) => { });
assert.equal(registry.has("test"), true);
});
it("should throw when adding a function that already exists", () => {
const registry = new FunctionRegistry({
"test": async (memory, functions, tokenizer, args) => { }
});
assert.throws(() => registry.addFunction("test", async (memory, functions, tokenizer, args) => { }));
});
});
describe("get", () => {
it("should get a function", () => {
const registry = new FunctionRegistry({
"test": async (memory, functions, tokenizer, args) => { }
});
| const fn = registry.get("test"); |
assert.notEqual(fn, null);
});
it("should throw when getting a function that doesn't exist", () => {
const registry = new FunctionRegistry();
assert.throws(() => registry.get("test"));
});
});
describe("has", () => {
it("should return false when a function doesn't exist", () => {
const registry = new FunctionRegistry();
assert.equal(registry.has("test"), false);
});
it("should return true when a function exists", () => {
const registry = new FunctionRegistry({
"test": async (memory, functions, tokenizer, args) => { }
});
assert.equal(registry.has("test"), true);
});
});
describe("invoke", () => {
const memory = new VolatileMemory();
const tokenizer = new GPT3Tokenizer();
it("should invoke a function", async () => {
let called = false;
const registry = new FunctionRegistry({
"test": async (memory, functions, tokenizer, args) => {
assert.equal(args.length, 1);
assert.equal(args[0], "Hello World");
called = true;
}
});
await registry.invoke("test", memory, registry, tokenizer, ["Hello World"]);
assert.equal(called, true);
});
it("should throw when invoking a function that doesn't exist", () => {
const registry = new FunctionRegistry();
assert.throws(() => registry.invoke("test", memory, registry, tokenizer, ["Hello World"]));
});
});
});
| src/FunctionRegistry.spec.ts | Stevenic-promptrix-4a210d8 | [
{
"filename": "src/FunctionRegistry.ts",
"retrieved_chunk": " }\n this._functions.set(name, value);\n }\n public invoke(key: string, memory: PromptMemory, functions: PromptFunctions, tokenizer: Tokenizer, args: string[]): Promise<any> {\n const fn = this.get(key);\n return fn(memory, functions, tokenizer, args);\n }\n}",
"score": 35.74995017337782
},
{
"filename": "src/TemplateSection.spec.ts",
"retrieved_chunk": " 'test': async (memory, functions, tokenizer, args) => 'Hello World',\n 'test2': async (memory, functions, tokenizer, args) => args[0],\n 'test3': async (memory, functions, tokenizer, args) => args.join(' '),\n });\n const tokenizer = new GPT3Tokenizer();\n describe(\"constructor\", () => {\n it(\"should create a TemplateSection\", () => {\n const section = new TemplateSection(\"Hello World\", \"user\");\n assert.equal(section.template, \"Hello World\");\n assert.equal(section.role, \"user\");",
"score": 33.28831458246205
},
{
"filename": "src/VolatileMemory.spec.ts",
"retrieved_chunk": " memory.set(\"test\", 123);\n assert.equal(memory.has(\"test\"), true);\n });\n it(\"should set an object\", () => {\n memory.set(\"test2\", obj);\n assert.equal(memory.has(\"test2\"), true);\n });\n });\n describe(\"get\", () => {\n it(\"should get a primitive value\", () => {",
"score": 32.02587072683796
},
{
"filename": "src/VolatileMemory.spec.ts",
"retrieved_chunk": " const value = memory.get(\"test\");\n assert.equal(value, 123);\n });\n it(\"should get an object that's a clone\", () => {\n const value = memory.get(\"test2\");\n assert.deepEqual(value, { foo: 'bar' });\n assert.notEqual(value, obj);\n });\n it(\"should return undefined when getting a value that doesn't exist\", () => {\n const value = memory.get(\"test3\");",
"score": 29.82633333303386
},
{
"filename": "src/FunctionRegistry.ts",
"retrieved_chunk": " public get(name: string): PromptFunction {\n const fn = this._functions.get(name);\n if (!fn) {\n throw new Error(`Function '${name}' not found.`);\n }\n return fn;\n }\n public addFunction(name: string, value: PromptFunction): void {\n if (this._functions.has(name)) {\n throw new Error(`Function '${name}' already exists.`);",
"score": 28.288758528404763
}
] | typescript | const fn = registry.get("test"); |
import { ImmediateInstruction, Instruction } from "./bass";
import { buildDisassembledInstructionString } from "./display";
import { DisassembledInstruction } from "./types";
import { maskOfSize } from "./util";
export const parseBinaryBuffer = (
buffer: Buffer,
instructions: Instruction[]
): string => {
const disassembledInstructions: DisassembledInstruction[] = [];
const unsetLabels: Array<DisassembledInstruction[] | undefined> = new Array(
8192
);
for (let i = 0; i < buffer.length; i += 2) {
const highByte = buffer[i]!;
const lowByte = buffer[i + 1]!;
const address = i / 2;
const correctedWord = (highByte << 8) | lowByte;
const instruction = findWordInstruction(correctedWord, instructions);
const disassembledInstruction: DisassembledInstruction = {
instruction,
actualWord: correctedWord,
address,
};
if (isFlowControlWithImmediate(instruction)) {
// Convert local address into global one
const pcLowerByte =
correctedWord & maskOfSize(instruction.immediate.bitCount);
let pcUpperFive = (address >> 8) & 0x1f;
if (isCalz(instruction)) {
// calz is only zero page and prevents pset
pcUpperFive = 0;
} else {
const lastInstruction =
disassembledInstructions[disassembledInstructions.length - 1]!;
if (isPset(lastInstruction.instruction)) {
// PSET immediate determines our upper 5 bits
pcUpperFive = lastInstruction.actualWord & 0x1f;
}
}
const pc = (pcUpperFive << 8) | pcLowerByte;
const existingLabel = unsetLabels[pc];
if (existingLabel) {
existingLabel.push(disassembledInstruction);
} else {
unsetLabels[pc] = [disassembledInstruction];
}
}
disassembledInstructions.push(disassembledInstruction);
}
// Build label names
let labelCount = 0;
const namedLabels: Array<
| {
name: string;
instructions: DisassembledInstruction[];
}
| undefined
> = unsetLabels.map((instructions) => {
if (!!instructions) {
return {
name: `label_${labelCount++}`,
instructions,
};
}
return undefined;
});
  // Build a map from each referencing instruction's address to the label name that replaces its immediate
const labelUsageMap: Array<string | undefined> = new Array(8192);
for (const namedLabel of namedLabels) {
if (namedLabel) {
for (const instruction of namedLabel.instructions) {
labelUsageMap[instruction.address] = namedLabel.name;
}
}
}
let output = "";
let address = 0;
for (const instruction of disassembledInstructions) {
const immediateLabel = labelUsageMap[instruction.address];
const lineLabel = namedLabels[instruction.address];
if (lineLabel) {
output += `\n${lineLabel.name}:\n`;
}
output += ` ${buildDisassembledInstructionString(
instruction,
immediateLabel
)}\n`;
address += 1;
}
return output;
};
const findWordInstruction = (word: number, instructions: Instruction[]) => {
// Naive because it doesn't really matter
let bestMatch = instructions[0]!;
for (let i = 0; i < instructions.length; i++) {
const instruction = instructions[i]!;
| if (instruction.sortableOpcode <= word) { |
bestMatch = instruction;
} else {
// We've passed the best solution, end
break;
}
}
return bestMatch;
};
const flowControlImmediateMnemonics = ((): Set<string> =>
new Set<string>(["call", "calz", "jp"]))();
const extractMnemonic = (instruction: Instruction): string =>
instruction.originalInstruction.split(/\s/)[0]!.trim();
const isFlowControlWithImmediate = (
instruction: Instruction
): instruction is ImmediateInstruction => {
const mnemonic = extractMnemonic(instruction);
return flowControlImmediateMnemonics.has(mnemonic);
};
const isPset = (instruction: Instruction): boolean => {
const mnemonic = extractMnemonic(instruction);
return mnemonic === "pset";
};
const isCalz = (instruction: Instruction) => {
const mnemonic = extractMnemonic(instruction);
return mnemonic === "calz";
};
| src/lib/disassembly.ts | agg23-tamagotchi-disassembled-421eacb | [
{
"filename": "src/lib/display.ts",
"retrieved_chunk": " // Opcode - Source - Dest - Comments\n const splitInstruction = instructionString.split(/\\s+/);\n let lastPadWidth = 0;\n for (let i = 2; i >= splitInstruction.length - 1; i--) {\n lastPadWidth += columnPadWidth(i);\n }\n const formattedInstructionString = splitInstruction\n .map((s, i) => {\n const pad =\n i === splitInstruction.length - 1 ? lastPadWidth : columnPadWidth(i);",
"score": 28.0860111687704
},
{
"filename": "src/lib/opcodeOutput.ts",
"retrieved_chunk": " let index = 0;\n let outputWord = 0;\n while (index < template.length) {\n const char = template[index];\n if (char === \"%\") {\n // Consume chars until whitespace\n let data = 0;\n let count = 0;\n for (let i = 1; i < Math.min(13, template.length - index); i++) {\n const nextChar = template[index + i]!;",
"score": 27.272824983750503
},
{
"filename": "src/lib/opcodeOutput.ts",
"retrieved_chunk": "): Buffer => {\n const bufferSize = word16Align ? 8192 * 2 : (8192 * 3) / 2;\n const buffer = Buffer.alloc(bufferSize);\n let byteBuffer = 0;\n let bufferAddress = 0;\n let lowNibble = false;\n let evenByte = true;\n for (let i = 0; i < threeNibbleBuffer.length; i++) {\n const nibble = threeNibbleBuffer[i]!;\n const writeSpacerValue = word16Align && !lowNibble && evenByte;",
"score": 26.35128744452312
},
{
"filename": "src/extractIcons.ts",
"retrieved_chunk": " for (let i = 0; i < buffer.length; i += 2) {\n // Skip the low byte of every word\n const highNibble = buffer[i]! & 0xf;\n if (highNibble === 0x9) {\n // LBPX\n // This is probably a set of pixels for an image\n lbpxCount += 1;\n } else if (highNibble === 0x1 && lbpxCount > 0) {\n // RETD\n // We have some number of possible pixels, so consider this a complete image write",
"score": 25.980256321906914
},
{
"filename": "src/lib/opcodeOutput.ts",
"retrieved_chunk": " program: AssembledProgram,\n word16Align: boolean\n): Option<Buffer> => {\n // This buffer stores each nibble of the program separately, and we will combine this later into the output buffer\n const threeNibbleBuffer: number[] = new Array(8192 * 3);\n // Fill array with 0xF\n for (let i = 0; i < threeNibbleBuffer.length; i++) {\n threeNibbleBuffer[i] = 0xf;\n }\n for (const instruction of program.matchedInstructions) {",
"score": 24.92383605197419
}
] | typescript | if (instruction.sortableOpcode <= word) { |
import { ImmediateInstruction, Instruction } from "./bass";
import { buildDisassembledInstructionString } from "./display";
import { DisassembledInstruction } from "./types";
import { maskOfSize } from "./util";
export const parseBinaryBuffer = (
buffer: Buffer,
instructions: Instruction[]
): string => {
const disassembledInstructions: DisassembledInstruction[] = [];
const unsetLabels: Array<DisassembledInstruction[] | undefined> = new Array(
8192
);
for (let i = 0; i < buffer.length; i += 2) {
const highByte = buffer[i]!;
const lowByte = buffer[i + 1]!;
const address = i / 2;
const correctedWord = (highByte << 8) | lowByte;
const instruction = findWordInstruction(correctedWord, instructions);
const disassembledInstruction: DisassembledInstruction = {
instruction,
actualWord: correctedWord,
address,
};
if (isFlowControlWithImmediate(instruction)) {
// Convert local address into global one
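      // The reconstructed PC is 13 bits wide: the upper 5 bits come from the current
      // page (or the preceding PSET immediate), the lower bits from this instruction's immediate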
const pcLowerByte =
correctedWord & maskOfSize(instruction.immediate.bitCount);
let pcUpperFive = (address >> 8) & 0x1f;
if (isCalz(instruction)) {
// calz is only zero page and prevents pset
pcUpperFive = 0;
} else {
const lastInstruction =
disassembledInstructions[disassembledInstructions.length - 1]!;
if (isPset(lastInstruction.instruction)) {
// PSET immediate determines our upper 5 bits
pcUpperFive | = lastInstruction.actualWord & 0x1f; |
}
}
const pc = (pcUpperFive << 8) | pcLowerByte;
const existingLabel = unsetLabels[pc];
if (existingLabel) {
existingLabel.push(disassembledInstruction);
} else {
unsetLabels[pc] = [disassembledInstruction];
}
}
disassembledInstructions.push(disassembledInstruction);
}
// Build label names
let labelCount = 0;
const namedLabels: Array<
| {
name: string;
instructions: DisassembledInstruction[];
}
| undefined
> = unsetLabels.map((instructions) => {
if (!!instructions) {
return {
name: `label_${labelCount++}`,
instructions,
};
}
return undefined;
});
  // Build a map from instruction addresses to label names so immediates can be replaced with labels
const labelUsageMap: Array<string | undefined> = new Array(8192);
for (const namedLabel of namedLabels) {
if (namedLabel) {
for (const instruction of namedLabel.instructions) {
labelUsageMap[instruction.address] = namedLabel.name;
}
}
}
let output = "";
let address = 0;
for (const instruction of disassembledInstructions) {
const immediateLabel = labelUsageMap[instruction.address];
const lineLabel = namedLabels[instruction.address];
if (lineLabel) {
output += `\n${lineLabel.name}:\n`;
}
output += ` ${buildDisassembledInstructionString(
instruction,
immediateLabel
)}\n`;
address += 1;
}
return output;
};
const findWordInstruction = (word: number, instructions: Instruction[]) => {
// Naive because it doesn't really matter
let bestMatch = instructions[0]!;
for (let i = 0; i < instructions.length; i++) {
const instruction = instructions[i]!;
if (instruction.sortableOpcode <= word) {
bestMatch = instruction;
} else {
// We've passed the best solution, end
break;
}
}
return bestMatch;
};
const flowControlImmediateMnemonics = ((): Set<string> =>
new Set<string>(["call", "calz", "jp"]))();
const extractMnemonic = (instruction: Instruction): string =>
instruction.originalInstruction.split(/\s/)[0]!.trim();
const isFlowControlWithImmediate = (
instruction: Instruction
): instruction is ImmediateInstruction => {
const mnemonic = extractMnemonic(instruction);
return flowControlImmediateMnemonics.has(mnemonic);
};
const isPset = (instruction: Instruction): boolean => {
const mnemonic = extractMnemonic(instruction);
return mnemonic === "pset";
};
const isCalz = (instruction: Instruction) => {
const mnemonic = extractMnemonic(instruction);
return mnemonic === "calz";
};
| src/lib/disassembly.ts | agg23-tamagotchi-disassembled-421eacb | [
{
"filename": "src/lib/display.ts",
"retrieved_chunk": " const immediateSuffix = instructionString.substring(\n stringIndex + stringLength\n );\n let immediate = \"\";\n if (immediateLabel) {\n immediate = immediateLabel;\n } else {\n const argument = maskOfSize(bitCount) & actualWord;\n if (isLetterChar(immediatePrefix.charAt(immediatePrefix.length - 1))) {\n // If letter, treat as decimal",
"score": 10.855379359148742
},
{
"filename": "src/lib/display.ts",
"retrieved_chunk": "import { DisassembledInstruction } from \"./types\";\nimport { isLetterChar, maskOfSize } from \"./util\";\nexport const buildDisassembledInstructionString = (\n { instruction, actualWord, address }: DisassembledInstruction,\n immediateLabel: string | undefined\n) => {\n let instructionString = instruction.originalInstruction;\n if (instruction.type === \"immediate\") {\n const { bitCount, stringIndex, stringLength } = instruction.immediate;\n const immediatePrefix = instructionString.substring(0, stringIndex);",
"score": 10.660066097923035
},
{
"filename": "src/assembler.ts",
"retrieved_chunk": " const matches = instruction.regex.exec(line);\n const address = program.currentAddress;\n if (!!matches && matches.length > 0) {\n if (matches[1] !== undefined) {\n // immediate\n if (instruction.type !== \"immediate\") {\n log(\n \"Attempted to match content with non-immediate instruction\",\n lineNumber\n );",
"score": 10.596813071157168
},
{
"filename": "src/assembler.ts",
"retrieved_chunk": " lineNumber,\n });\n }\n }\n lineWithoutLabel = lineWithoutLabel.replace(commentRegex, \"\").trim();\n if (!hasInstruction && lineWithoutLabel.length > 0) {\n log(`Unknown instruction \"${lineWithoutLabel}\"`, lineNumber);\n }\n};\nif (argv.length != 4 && argv.length != 5) {",
"score": 9.902730023734293
},
{
"filename": "src/assembler.ts",
"retrieved_chunk": " });\n } else if (matches[2] !== undefined) {\n // potential label\n if (instruction.type !== \"immediate\") {\n log(\n \"Attempted to match content with non-immediate instruction\",\n lineNumber\n );\n return;\n }",
"score": 8.669484755270842
}
] | typescript | = lastInstruction.actualWord & 0x1f; |
import { ImmediateInstruction, Instruction } from "./bass";
import { buildDisassembledInstructionString } from "./display";
import { DisassembledInstruction } from "./types";
import { maskOfSize } from "./util";
export const parseBinaryBuffer = (
buffer: Buffer,
instructions: Instruction[]
): string => {
const disassembledInstructions: DisassembledInstruction[] = [];
const unsetLabels: Array<DisassembledInstruction[] | undefined> = new Array(
8192
);
for (let i = 0; i < buffer.length; i += 2) {
const highByte = buffer[i]!;
const lowByte = buffer[i + 1]!;
const address = i / 2;
const correctedWord = (highByte << 8) | lowByte;
const instruction = findWordInstruction(correctedWord, instructions);
const disassembledInstruction: DisassembledInstruction = {
instruction,
actualWord: correctedWord,
address,
};
if (isFlowControlWithImmediate(instruction)) {
// Convert local address into global one
const pcLowerByte =
correctedWord & maskOfSize(instruction.immediate.bitCount);
let pcUpperFive = (address >> 8) & 0x1f;
if (isCalz(instruction)) {
// calz is only zero page and prevents pset
pcUpperFive = 0;
} else {
const lastInstruction =
disassembledInstructions[disassembledInstructions.length - 1]!;
if (isPset(lastInstruction.instruction)) {
// PSET immediate determines our upper 5 bits
pcUpperFive = lastInstruction.actualWord & 0x1f;
}
}
const pc = (pcUpperFive << 8) | pcLowerByte;
const existingLabel = unsetLabels[pc];
if (existingLabel) {
existingLabel.push(disassembledInstruction);
} else {
unsetLabels[pc] = [disassembledInstruction];
}
}
disassembledInstructions.push(disassembledInstruction);
}
// Build label names
let labelCount = 0;
const namedLabels: Array<
| {
name: string;
instructions: DisassembledInstruction[];
}
| undefined
> = unsetLabels.map((instructions) => {
if (!!instructions) {
return {
name: `label_${labelCount++}`,
instructions,
};
}
return undefined;
});
  // Build a map from instruction addresses to label names so immediates can be replaced with labels
const labelUsageMap: Array<string | undefined> = new Array(8192);
for (const namedLabel of namedLabels) {
if (namedLabel) {
for (const instruction of namedLabel.instructions) {
labelUsageMap[instruction.address] = namedLabel.name;
}
}
}
let output = "";
let address = 0;
for (const instruction of disassembledInstructions) {
const immediateLabel = labelUsageMap[instruction.address];
const lineLabel = namedLabels[instruction.address];
if (lineLabel) {
output += `\n${lineLabel.name}:\n`;
}
output += ` | ${buildDisassembledInstructionString(
instruction,
immediateLabel
)}\n`; |
address += 1;
}
return output;
};
const findWordInstruction = (word: number, instructions: Instruction[]) => {
// Naive because it doesn't really matter
let bestMatch = instructions[0]!;
for (let i = 0; i < instructions.length; i++) {
const instruction = instructions[i]!;
if (instruction.sortableOpcode <= word) {
bestMatch = instruction;
} else {
// We've passed the best solution, end
break;
}
}
return bestMatch;
};
const flowControlImmediateMnemonics = ((): Set<string> =>
new Set<string>(["call", "calz", "jp"]))();
const extractMnemonic = (instruction: Instruction): string =>
instruction.originalInstruction.split(/\s/)[0]!.trim();
const isFlowControlWithImmediate = (
instruction: Instruction
): instruction is ImmediateInstruction => {
const mnemonic = extractMnemonic(instruction);
return flowControlImmediateMnemonics.has(mnemonic);
};
const isPset = (instruction: Instruction): boolean => {
const mnemonic = extractMnemonic(instruction);
return mnemonic === "pset";
};
const isCalz = (instruction: Instruction) => {
const mnemonic = extractMnemonic(instruction);
return mnemonic === "calz";
};
| src/lib/disassembly.ts | agg23-tamagotchi-disassembled-421eacb | [
{
"filename": "src/lib/display.ts",
"retrieved_chunk": "import { DisassembledInstruction } from \"./types\";\nimport { isLetterChar, maskOfSize } from \"./util\";\nexport const buildDisassembledInstructionString = (\n { instruction, actualWord, address }: DisassembledInstruction,\n immediateLabel: string | undefined\n) => {\n let instructionString = instruction.originalInstruction;\n if (instruction.type === \"immediate\") {\n const { bitCount, stringIndex, stringLength } = instruction.immediate;\n const immediatePrefix = instructionString.substring(0, stringIndex);",
"score": 19.66240845311435
},
{
"filename": "src/assembler.ts",
"retrieved_chunk": " console.log(`Received ${argv.length - 2} arguments. Expected 2-3\\n`);\n console.log(\n \"Usage: node assembler.js [input.asm] [output.bin] {true|false: 12 bit output}\"\n );\n process.exit(1);\n}\nconst archPath = path.join(__dirname, \"../bass/6200.arch\");\nconst inputFile = argv[2] as string;\nconst outputFile = argv[3] as string;\nconst word16Align = argv[4] !== \"true\";",
"score": 14.643519038263667
},
{
"filename": "src/lib/display.ts",
"retrieved_chunk": " const immediateSuffix = instructionString.substring(\n stringIndex + stringLength\n );\n let immediate = \"\";\n if (immediateLabel) {\n immediate = immediateLabel;\n } else {\n const argument = maskOfSize(bitCount) & actualWord;\n if (isLetterChar(immediatePrefix.charAt(immediatePrefix.length - 1))) {\n // If letter, treat as decimal",
"score": 13.147531500008625
},
{
"filename": "src/extractIcons.ts",
"retrieved_chunk": " console.log(`Received ${argv.length - 2} arguments. Expected 1\\n`);\n console.log(\"Usage: node extractIcons.js [input.bin]\");\n process.exit(1);\n}\nconst inputFile = argv[2] as string;\nconst build = async () => {\n const buffer = readFileSync(inputFile);\n generateImages(buffer);\n};\nbuild();",
"score": 11.239118571313584
},
{
"filename": "src/lib/bass.ts",
"retrieved_chunk": " return;\n }\n const sections = line.split(\";\");\n if (sections.length != 2) {\n log(\n \"Unexpected semicolon. Does this instruction have an output?\",\n lineNumber\n );\n return;\n }",
"score": 11.227061435935084
}
] | typescript | ${buildDisassembledInstructionString(
instruction,
immediateLabel
)}\n`; |
import { log } from "./log";
import { AssembledProgram, Option } from "./types";
import { maskOfSize } from "./util";
/**
* Builds the output buffer from the matched instructions
* @param program The configured program we have built
* @param word16Align If true, align the 12 bit opcodes to 16 bit words. The lowest nibble will be 0
* @returns The output buffer that should be written to the assembled binary
*/
export const outputInstructions = (
program: AssembledProgram,
word16Align: boolean
): Option<Buffer> => {
// This buffer stores each nibble of the program separately, and we will combine this later into the output buffer
const threeNibbleBuffer: number[] = new Array(8192 * 3);
// Fill array with 0xF
for (let i = 0; i < threeNibbleBuffer.length; i++) {
threeNibbleBuffer[i] = 0xf;
}
for (const instruction of program.matchedInstructions) {
let opcode = 0;
switch (instruction.type) {
case "literal": {
opcode = buildOpcode(instruction.opcodeString, 0, 0);
break;
}
case "immediate": {
opcode = buildOpcode(
instruction.opcodeString,
instruction.bitCount,
instruction.immediate
);
break;
}
case "label": {
const label = program.matchedLabels[instruction.label];
if (!label) {
log(`Unknown label ${instruction.label}`, instruction.lineNumber);
return { type: "none" };
}
opcode = buildOpcode(
instruction.opcodeString,
instruction.bitCount,
label.address
);
break;
}
case "constant": {
if (instruction.subtype === "literal") {
opcode = instruction.value;
} else {
// Label
const label = program.matchedLabels[instruction.label];
if (!label) {
log(`Unknown label ${instruction.label}`, instruction.lineNumber);
return { type: "none" };
}
console.log(`${label.address.toString(16)}`);
opcode = label.address;
}
break;
}
}
const low = opcode & 0xf;
const mid = (opcode & 0xf0) >> 4;
const high = (opcode & 0xf00) >> 8;
const baseAddress = instruction.address * 3;
// We use reverse order because that's how the nibbles are in the ROM
threeNibbleBuffer[baseAddress] = high;
threeNibbleBuffer[baseAddress + 1] = mid;
threeNibbleBuffer[baseAddress + 2] = low;
}
return {
type: "some",
value: copyToOutputBuffer(threeNibbleBuffer, word16Align),
};
};
const copyToOutputBuffer = (
threeNibbleBuffer: number[],
word16Align: boolean
): Buffer => {
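  // 8192 twelve-bit words are stored either as 2 bytes each when 16-bit aligned
  // (16384 bytes total) or packed nibble-by-nibble at 1.5 bytes each (12288 bytes)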
const bufferSize = word16Align ? 8192 * 2 : (8192 * 3) / 2;
const buffer = Buffer.alloc(bufferSize);
let byteBuffer = 0;
let bufferAddress = 0;
let lowNibble = false;
let evenByte = true;
for (let i = 0; i < threeNibbleBuffer.length; i++) {
const nibble = threeNibbleBuffer[i]!;
const writeSpacerValue = word16Align && !lowNibble && evenByte;
if (lowNibble || writeSpacerValue) {
// "Second", lower value of byte, or we're writing the spacer now
byteBuffer |= nibble;
buffer[bufferAddress] = byteBuffer;
bufferAddress += 1;
byteBuffer = 0;
evenByte = !evenByte;
} else {
// "First", upper value of byte
byteBuffer |= nibble << 4;
}
if (!writeSpacerValue) {
// We've moved to the next byte if we wrote a spacer, so stay at !lowNibble
lowNibble = !lowNibble;
}
}
return buffer;
};
/**
 * Consumes the opcode template from the BASS arch file and produces the actual output word
* @param template The opcode template from the BASS arch file
* @param argSize The number of bits in an argument to the opcode, if any
* @param argument The actual data to pass as an argument to the opcode, if any
* @returns The output opcode as a 12 bit word
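 *
 * Illustrative example (hypothetical template, not taken from the arch file):
 * buildOpcode("%1110 =a", 8, 0x42) shifts in the literal bits 1110, then appends
 * the 8-bit masked argument, yielding 0xE42.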
*/
export const buildOpcode = (
template: string,
argSize: number,
argument: number
) => {
let index = 0;
let outputWord = 0;
while (index < template.length) {
const char = template[index];
if (char === "%") {
// Consume chars until whitespace
let data = 0;
let count = 0;
for (let i = 1; i < Math.min(13, template.length - index); i++) {
const nextChar = template[index + i]!;
if (nextChar !== "1" && nextChar !== "0") {
// Stop consuming
break;
}
data <<= 1;
data |= nextChar === "1" ? 1 : 0;
count += 1;
}
      // Shift the consumed bits into the output word
outputWord <<= count;
outputWord |= data;
index += count + 1;
} else if (char === "=") {
if (template[index + 1] !== "a") {
console.log(
`ERROR: Unexpected char after = in instruction definition "${template}"`
);
return 0;
}
outputWord <<= argSize;
| outputWord |= maskOfSize(argSize) & argument; |
index += 2;
} else {
index += 1;
}
}
return outputWord;
};
| src/lib/opcodeOutput.ts | agg23-tamagotchi-disassembled-421eacb | [
{
"filename": "src/lib/bass.ts",
"retrieved_chunk": " });\n }\n};\nconst buildSortableOpcode = (template: string, bitCount: number) =>\n buildOpcode(template, bitCount, 0);\nconst cleanAndFinishInstructionRegex = (instruction: string): RegExp => {\n const cleaned = instruction\n .trim()\n .replace(whitespaceRegex, whitespaceRegex.source);\n // Force nothing but whitespace from beginning of string to instruction",
"score": 12.79872538196529
},
{
"filename": "src/lib/log.ts",
"retrieved_chunk": "/**\n * Logs an error message with a line number and message\n * @param message The error message to display\n * @param lineNumber The one-based index of the line that generated the error\n */\nexport const log = (message: String, lineNumber: Number) =>\n console.log(`ERROR (line ${lineNumber}): ${message}`);",
"score": 11.963560323681065
},
{
"filename": "src/lib/bass.ts",
"retrieved_chunk": " const matchString = numberMatch[0];\n // This is guaranteed to exist due to the regex\n const bitCount = parseNumber(numberMatch[1]!);\n const index = numberMatch.index;\n const instructionLine =\n originalInstruction.substring(0, index) +\n \"(?:(0x[a-f0-9]+|[0-9]+)|([a-z0-9_]+))\" +\n originalInstruction.substring(index + matchString.length);\n const sortableOpcode = buildSortableOpcode(opcodeString, bitCount);\n config.instructions.push({",
"score": 10.994641124518695
},
{
"filename": "src/lib/display.ts",
"retrieved_chunk": " const immediateSuffix = instructionString.substring(\n stringIndex + stringLength\n );\n let immediate = \"\";\n if (immediateLabel) {\n immediate = immediateLabel;\n } else {\n const argument = maskOfSize(bitCount) & actualWord;\n if (isLetterChar(immediatePrefix.charAt(immediatePrefix.length - 1))) {\n // If letter, treat as decimal",
"score": 10.643369511100623
},
{
"filename": "src/lib/bass.ts",
"retrieved_chunk": " const [originalInstruction, opcode] = sections;\n if (!originalInstruction || !opcode) {\n log(\"Unknown input\", lineNumber);\n return;\n }\n const opcodeString = opcode.trim();\n let numberMatch = originalInstruction.match(bassNumberRegex);\n if (!!numberMatch && numberMatch.index) {\n // This instruction contains a star followed by a number\n // This is an immediate",
"score": 9.811230049781868
}
] | typescript | outputWord |= maskOfSize(argSize) & argument; |
import { ImmediateInstruction, Instruction } from "./bass";
import { buildDisassembledInstructionString } from "./display";
import { DisassembledInstruction } from "./types";
import { maskOfSize } from "./util";
export const parseBinaryBuffer = (
buffer: Buffer,
instructions: Instruction[]
): string => {
const disassembledInstructions: DisassembledInstruction[] = [];
const unsetLabels: Array<DisassembledInstruction[] | undefined> = new Array(
8192
);
for (let i = 0; i < buffer.length; i += 2) {
const highByte = buffer[i]!;
const lowByte = buffer[i + 1]!;
const address = i / 2;
const correctedWord = (highByte << 8) | lowByte;
const instruction = findWordInstruction(correctedWord, instructions);
const disassembledInstruction: DisassembledInstruction = {
instruction,
actualWord: correctedWord,
address,
};
if (isFlowControlWithImmediate(instruction)) {
// Convert local address into global one
const pcLowerByte =
correctedWord & maskOfSize(instruction.immediate.bitCount);
let pcUpperFive = (address >> 8) & 0x1f;
if (isCalz(instruction)) {
// calz is only zero page and prevents pset
pcUpperFive = 0;
} else {
const lastInstruction =
disassembledInstructions[disassembledInstructions.length - 1]!;
if (isPset(lastInstruction.instruction)) {
// PSET immediate determines our upper 5 bits
pcUpperFive = lastInstruction.actualWord & 0x1f;
}
}
const pc = (pcUpperFive << 8) | pcLowerByte;
const existingLabel = unsetLabels[pc];
if (existingLabel) {
existingLabel.push(disassembledInstruction);
} else {
unsetLabels[pc] = [disassembledInstruction];
}
}
disassembledInstructions.push(disassembledInstruction);
}
// Build label names
let labelCount = 0;
const namedLabels: Array<
| {
name: string;
instructions: DisassembledInstruction[];
}
| undefined
> = unsetLabels.map((instructions) => {
if (!!instructions) {
return {
name: `label_${labelCount++}`,
instructions,
};
}
return undefined;
});
  // Build a map from instruction addresses to label names so immediates can be replaced with labels
const labelUsageMap: Array<string | undefined> = new Array(8192);
for (const namedLabel of namedLabels) {
if (namedLabel) {
for (const instruction of namedLabel.instructions) {
| labelUsageMap[instruction.address] = namedLabel.name; |
}
}
}
let output = "";
let address = 0;
for (const instruction of disassembledInstructions) {
const immediateLabel = labelUsageMap[instruction.address];
const lineLabel = namedLabels[instruction.address];
if (lineLabel) {
output += `\n${lineLabel.name}:\n`;
}
output += ` ${buildDisassembledInstructionString(
instruction,
immediateLabel
)}\n`;
address += 1;
}
return output;
};
const findWordInstruction = (word: number, instructions: Instruction[]) => {
// Naive because it doesn't really matter
let bestMatch = instructions[0]!;
for (let i = 0; i < instructions.length; i++) {
const instruction = instructions[i]!;
if (instruction.sortableOpcode <= word) {
bestMatch = instruction;
} else {
// We've passed the best solution, end
break;
}
}
return bestMatch;
};
const flowControlImmediateMnemonics = ((): Set<string> =>
new Set<string>(["call", "calz", "jp"]))();
const extractMnemonic = (instruction: Instruction): string =>
instruction.originalInstruction.split(/\s/)[0]!.trim();
const isFlowControlWithImmediate = (
instruction: Instruction
): instruction is ImmediateInstruction => {
const mnemonic = extractMnemonic(instruction);
return flowControlImmediateMnemonics.has(mnemonic);
};
const isPset = (instruction: Instruction): boolean => {
const mnemonic = extractMnemonic(instruction);
return mnemonic === "pset";
};
const isCalz = (instruction: Instruction) => {
const mnemonic = extractMnemonic(instruction);
return mnemonic === "calz";
};
| src/lib/disassembly.ts | agg23-tamagotchi-disassembled-421eacb | [
{
"filename": "src/lib/opcodeOutput.ts",
"retrieved_chunk": " program: AssembledProgram,\n word16Align: boolean\n): Option<Buffer> => {\n // This buffer stores each nibble of the program separately, and we will combine this later into the output buffer\n const threeNibbleBuffer: number[] = new Array(8192 * 3);\n // Fill array with 0xF\n for (let i = 0; i < threeNibbleBuffer.length; i++) {\n threeNibbleBuffer[i] = 0xf;\n }\n for (const instruction of program.matchedInstructions) {",
"score": 30.898620943422795
},
{
"filename": "src/assembler.ts",
"retrieved_chunk": " for (const command of commands) {\n const matches = command.regex.exec(line);\n if (!!matches && matches.length > 0) {\n command.action({ lineNumber, line }, matches, program);\n return;\n }\n }\n let hasInstruction = false;\n // Match line against all known instructions from the BASS arch\n for (const instruction of instructionSet.instructions) {",
"score": 24.144714513529017
},
{
"filename": "src/assembler.ts",
"retrieved_chunk": " program.currentAddress += 1;\n break;\n }\n }\n if (hasInstruction && program.unmatchedLabels.length > 0) {\n // Add queued labels\n for (const label of program.unmatchedLabels) {\n const existingLabel = program.matchedLabels[label.label];\n if (existingLabel) {\n log(",
"score": 20.414324951382245
},
{
"filename": "src/lib/fs.ts",
"retrieved_chunk": "};\n/**\n * Reads and parses the BASS arch file\n * @param path The path of the arch file\n * @returns The InstructionSet resulting from parsing the arch file\n */\nexport const readArch = async (path: string): Promise<InstructionSet> => {\n const instructionSet: InstructionSet = {\n instructions: [],\n };",
"score": 16.721610143397424
},
{
"filename": "src/lib/bass.ts",
"retrieved_chunk": " // Force nothing but whitespace and a comment from instruction to end of string\n return new RegExp(\n instructionPrefixRegex.source + cleaned + instructionSuffixRegex.source\n );\n};",
"score": 16.491285768903285
}
] | typescript | labelUsageMap[instruction.address] = namedLabel.name; |
import fs, { readFileSync, writeFileSync } from "fs";
import { argv } from "process";
import readline from "readline";
import events from "events";
import { InstructionSet, parseArchLine } from "./lib/bass";
import { parseNumber } from "./lib/util";
import * as path from "path";
import { AssembledProgram } from "./lib/types";
import { commentRegex, labelRegex } from "./lib/regex";
import { outputInstructions } from "./lib/opcodeOutput";
import { log } from "./lib/log";
import { readArch, readByLines } from "./lib/fs";
interface ComamndEntry {
regex: RegExp;
action: (
line: { line: string; lineNumber: number },
matches: RegExpExecArray,
program: AssembledProgram
) => void;
}
// The commands supported by the assembler (separate from opcodes)
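// Examples (illustrative): "origin 0x100" moves the current assembly address;
// "constant 0xfff" or "constant some_label" emits a raw 12-bit word at the current address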
const commands: ComamndEntry[] = [
{
regex: /origin\s+((?:0x)?[a-f0-9]+)/,
action: ({ lineNumber }, [_2, address], program) => {
if (address === undefined) {
log("Could not parse origin", lineNumber);
return;
}
program.currentAddress = parseNumber(address);
},
},
{
regex: /constant\s+(?:(0x[a-f0-9]+|[0-9]+)|([a-z0-9_]+))/,
action: ({ line, lineNumber }, [_, constant, label], program) => {
const address = program.currentAddress;
if (constant !== undefined) {
const value = parseNumber(constant);
if (value > 4095) {
log(
`Constant ${constant} is too large to fit into 12 bits`,
lineNumber
);
return;
}
program.matchedInstructions.push({
type: "constant",
subtype: "literal",
value,
line,
lineNumber,
address,
});
} else if (label !== undefined) {
program.matchedInstructions.push({
type: "constant",
subtype: "label",
label,
line,
lineNumber,
address,
});
} else {
log("Unknown constant error", lineNumber);
return;
}
program.currentAddress += 1;
},
},
];
const parseAsmLine = (
line: string,
lineNumber: number,
instructionSet: InstructionSet,
program: AssembledProgram
) => {
if (line.length == 0 || line.startsWith("//") || line.startsWith(";")) {
// Comment. Skip
return;
}
for (const command of commands) {
const matches = command.regex.exec(line);
if (!!matches && matches.length > 0) {
command.action({ lineNumber, line }, matches, program);
return;
}
}
let hasInstruction = false;
// Match line against all known instructions from the BASS arch
for (const instruction of instructionSet.instructions) {
const matches = instruction.regex.exec(line);
const address = program.currentAddress;
if (!!matches && matches.length > 0) {
if (matches[1] !== undefined) {
// immediate
if (instruction.type !== "immediate") {
log(
"Attempted to match content with non-immediate instruction",
lineNumber
);
return;
}
program.matchedInstructions.push({
type: "immediate",
line,
immediate: parseNumber(matches[1]),
opcodeString: instruction.opcodeString,
bitCount: instruction.immediate.bitCount,
lineNumber,
address,
});
} else if (matches[2] !== undefined) {
// potential label
if (instruction.type !== "immediate") {
log(
"Attempted to match content with non-immediate instruction",
lineNumber
);
return;
}
program.matchedInstructions.push({
type: "label",
line,
label: matches[2],
opcodeString: instruction.opcodeString,
bitCount: instruction.immediate.bitCount,
lineNumber,
address,
});
} else {
// literal only
program.matchedInstructions.push({
type: "literal",
line,
opcodeString: instruction.opcodeString,
lineNumber,
address,
});
}
hasInstruction = true;
program.currentAddress += 1;
break;
}
}
if (hasInstruction && program.unmatchedLabels.length > 0) {
// Add queued labels
for (const label of program.unmatchedLabels) {
const existingLabel = program.matchedLabels[label.label];
if (existingLabel) {
log(
`Label "${label.label}" already exists. Was created on line ${existingLabel.lineNumber}`,
lineNumber
);
return;
}
program.matchedLabels[label.label] = {
lineNumber,
instructionIndex: program.matchedInstructions.length - 1,
address: program.currentAddress - 1,
};
}
// We've processed all labels
program.unmatchedLabels = [];
}
let lineWithoutLabel = line;
const matches = labelRegex.exec(line);
if (!!matches && matches.length > 0 && matches[1]) {
lineWithoutLabel =
lineWithoutLabel.substring(0, matches.index) +
lineWithoutLabel.substring(matches.index + matches[0].length);
const label = matches[1];
const existingLabel = program.matchedLabels[label];
if (existingLabel) {
log(
`Label "${label}" already exists. Was created on line ${existingLabel.lineNumber}`,
lineNumber
);
return;
}
if (hasInstruction) {
// Instruction on this line, pair them up
program.matchedLabels[label] = {
lineNumber,
instructionIndex: program.matchedInstructions.length - 1,
address: program.currentAddress - 1,
};
} else {
// Will pair with some future instruction. Queue it
program.unmatchedLabels.push({
label,
lineNumber,
});
}
}
lineWithoutLabel = lineWithoutLabel.replace(commentRegex, "").trim();
if (!hasInstruction && lineWithoutLabel.length > 0) {
log(`Unknown instruction "${lineWithoutLabel}"`, lineNumber);
}
};
if (argv.length != 4 && argv.length != 5) {
console.log(`Received ${argv.length - 2} arguments. Expected 2-3\n`);
console.log(
"Usage: node assembler.js [input.asm] [output.bin] {true|false: 12 bit output}"
);
process.exit(1);
}
const archPath = path.join(__dirname, "../bass/6200.arch");
const inputFile = argv[2] as string;
const outputFile = argv[3] as string;
const word16Align = argv[4] !== "true";
const build = async () => {
const program: AssembledProgram = {
currentAddress: 0,
matchedInstructions: [],
matchedLabels: {},
unmatchedLabels: [],
};
const instructionSet = await readArch(archPath);
await | readByLines(inputFile, (line, lineNumber) =>
parseAsmLine(line, lineNumber, instructionSet, program)
); |
const outputBuffer = outputInstructions(program, word16Align);
if (outputBuffer.type === "some") {
writeFileSync(outputFile, outputBuffer.value);
} else {
console.log("Could not generate output binary");
}
};
build();
| src/assembler.ts | agg23-tamagotchi-disassembled-421eacb | [
{
"filename": "src/lib/fs.ts",
"retrieved_chunk": " await readByLines(path, (line, lineNumber) =>\n parseArchLine(line, lineNumber, instructionSet)\n );\n return instructionSet;\n};",
"score": 37.43083165632357
},
{
"filename": "src/disassembler.ts",
"retrieved_chunk": "const archPath = path.join(__dirname, \"../bass/6200.arch\");\nconst inputFile = argv[2] as string;\nconst outputFile = argv[3] as string;\nconst build = async () => {\n const instructionSet = await readArch(archPath);\n const sortedInstructions = instructionSet.instructions.sort(\n (a, b) => a.sortableOpcode - b.sortableOpcode\n );\n const buffer = readFileSync(inputFile);\n const outputString = parseBinaryBuffer(buffer, sortedInstructions);",
"score": 24.918671886661354
},
{
"filename": "src/lib/fs.ts",
"retrieved_chunk": " path: string,\n onLine: (line: string, lineNumber: number) => void\n) => {\n const rl = readline.createInterface({\n input: fs.createReadStream(path),\n crlfDelay: Infinity,\n });\n let lineNumber = 0;\n rl.on(\"line\", (line) => onLine(line.toLowerCase().trim(), ++lineNumber));\n await events.once(rl, \"close\");",
"score": 19.403992921265832
},
{
"filename": "src/lib/types.ts",
"retrieved_chunk": " >;\n matchedLabels: {\n [name: string]: {\n lineNumber: number;\n instructionIndex: number;\n address: number;\n };\n };\n unmatchedLabels: Array<{\n label: string;",
"score": 13.720784938778515
},
{
"filename": "src/lib/bass.ts",
"retrieved_chunk": " * @param config The global instruction set config\n * @returns\n */\nexport const parseArchLine = (\n line: string,\n lineNumber: number,\n config: InstructionSet\n) => {\n if (line.length == 0 || line.startsWith(\"//\") || line.startsWith(\"#\")) {\n // Comment. Skip",
"score": 12.483696738909522
}
] | typescript | readByLines(inputFile, (line, lineNumber) =>
parseAsmLine(line, lineNumber, instructionSet, program)
); |