/**
 *
 * Copyright 2023-2025 InspectorRAGet Team
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 **/

'use client';

import { isEmpty } from 'lodash';
import DOMPurify from 'dompurify';
import parse from 'html-react-parser';
import { useMemo, useState } from 'react';
import {
  Tabs,
  TabList,
  Tab,
  TabPanels,
  TabPanel,
  ContainedList,
  ContainedListItem,
} from '@carbon/react';

import { Model, TaskEvaluation, Task, Metric, MessageStep } from '@/src/types';
import { useDataStore } from '@/src/store';
import { truncate } from '@/src/utilities/strings';
import AnnotationsTable from '@/src/views/annotations-table/AnnotationsTable';
import ChatLine from '@/src/components/chatline/ChatLine';
import ChatTaskCopierModal from '@/src/components/task-copier/ChatTaskCopier';

import classes from './ChatTask.module.scss';

// ===================================================================================
// TYPES
// ===================================================================================
interface Props {
  task: Task;
  models: Map<string, Model>;
  metrics: Metric[];
  taskCopierModalOpen: boolean;
  setTaskCopierModalOpen: Function;
  updateCommentProvenance: Function;
}

// ===================================================================================
// RENDER FUNCTIONS
// ===================================================================================
function Evaluation({
  evaluation,
  hMetrics,
  aMetrics,
}: {
  evaluation: TaskEvaluation;
  hMetrics: Map<string, Metric>;
  aMetrics: Map<string, Metric>;
}) {
  return (
    <div>
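      {/* Show human annotations first, then algorithmic ones, when the
          evaluation carries annotations and matching metrics exist */}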
      {evaluation.annotations && hMetrics.size ? (
        <>
          <strong>Human Evaluations:</strong>
          {/* Assumption: AnnotationsTable accepts the raw annotations plus the relevant metric definitions */}
          <AnnotationsTable
            annotations={evaluation.annotations}
            metrics={[...hMetrics.values()]}
          />
        </>
      ) : null}
      {evaluation.annotations && aMetrics.size ? (
        <>
          <strong>Algorithmic Evaluations:</strong>
          <AnnotationsTable
            annotations={evaluation.annotations}
            metrics={[...aMetrics.values()]}
          />
        </>
      ) : null}
    </div>
  );
}

function Steps({ steps }: { steps?: MessageStep[] }) {
  return (
    <div>
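      {/* List intermediate steps when they are present; otherwise fall back to a short notice */}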
      {steps && !isEmpty(steps) ? (
        <ContainedList label="Steps" kind="on-page">
          {/* Assumption: each MessageStep carries displayable HTML in a `content` field */}
          {steps.map((step, stepIdx) => (
            <ContainedListItem key={`step-${stepIdx}`}>
              {parse(DOMPurify.sanitize(step.content))}
            </ContainedListItem>
          ))}
        </ContainedList>
      ) : (
        <span>No steps information is available.</span>
      )}
    </div>
  );
}

// ===================================================================================
// MAIN FUNCTION
// ===================================================================================
export default function ChatTask({
  task,
  models,
  metrics,
  taskCopierModalOpen,
  setTaskCopierModalOpen,
  updateCommentProvenance,
}: Props) {
  // Step 1: Initialize state and necessary variables
  const [selectedEvaluationIndex, setSelectedEvaluationIndex] = useState(0);

  // Step 2: Run effects
  // Step 2.a: Fetch data from data store
  const { item: data } = useDataStore();

  // Step 2.b: Fetch documents and evaluations
  const evaluations = useMemo(() => {
    // Step 2.b.i: Fetch evaluations
    let taskEvaluations: TaskEvaluation[] | undefined = undefined;
    if (data) {
      taskEvaluations = data.evaluations.filter(
        (evaluation) => evaluation.taskId === task.taskId,
      );
    }

    return taskEvaluations;
  }, [task.taskId, data]);

  // Step 2.c: Build human & algorithmic metric maps
  const [hMetrics, aMetrics] = useMemo(() => {
    const humanMetrics = new Map<string, Metric>(
      metrics
        ?.filter((metric) => metric.author === 'human')
        .map((metric) => [metric.name, metric]),
    );
    const algorithmicMetrics = new Map<string, Metric>(
      metrics
        ?.filter((metric) => metric.author === 'algorithm')
        .map((metric) => [metric.name, metric]),
    );

    return [humanMetrics, algorithmicMetrics];
  }, [metrics]);

  // Step 3: Render
  return (
    <>
      {models && metrics && task && evaluations && (
        /* Assumption: the copier modal receives the task, models, metrics, and evaluations it can copy from */
        <ChatTaskCopierModal
          task={task}
          models={[...models.values()]}
          metrics={metrics}
          evaluations={evaluations}
          open={taskCopierModalOpen}
          onClose={() => {
            setTaskCopierModalOpen(false);
          }}
        ></ChatTaskCopierModal>
      )}
      {task && models && evaluations && (
        <>
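          {/* Conversation so far: every input message rendered as a chat line */}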
          <div>
            {/* Assumption: ChatLine accepts the message object to display */}
            {Array.isArray(task.input)
              ? task.input.map((message, messageIdx) => (
                  <ChatLine key={`message-${messageIdx}`} message={message} />
                ))
              : null}
          </div>
          <Tabs
            selectedIndex={selectedEvaluationIndex}
            onChange={(e) => {
              setSelectedEvaluationIndex(e.selectedIndex);
            }}
          >
            <TabList aria-label="Models">
              {evaluations.map((evaluation) => (
                <Tab key={`tab-${evaluation.modelId}`}>
                  {truncate(
                    models.get(evaluation.modelId)?.name || evaluation.modelId,
                    15,
                  )}
                </Tab>
              ))}
            </TabList>
            <TabPanels>
              {evaluations.map((evaluation) => (
                <TabPanel key={`panel-${evaluation.modelId}`}>
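                  {/* One panel per model: model name, its sanitized response,
                      reference targets, and evaluation details */}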
                  <div>
                    <strong>Model:</strong>{' '}
                    {models.get(evaluation.modelId)?.name ||
                      evaluation.modelId}
                  </div>
                  <div
                    onCopy={() => {
                      updateCommentProvenance(
                        `${evaluation.modelId}::evaluation::response`,
                      );
                    }}
                    onMouseUp={() =>
                      updateCommentProvenance(
                        `${evaluation.modelId}::evaluation::response`,
                      )
                    }
                  >
                    {parse(DOMPurify.sanitize(evaluation.modelResponse))}
                  </div>
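                  {/* Reference targets, when the task provides them */}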
                  {task.targets && !isEmpty(task.targets) ? (
                    <ContainedList label="Targets" kind="on-page">
                      {task.targets.length > 1 ? (
                        task.targets.map((target, targetIdx) =>
                          target.text ? (
                            <ContainedListItem key={`target-${targetIdx}`}>
                              Target {targetIdx + 1}: {target.text}
                            </ContainedListItem>
                          ) : null,
                        )
                      ) : (
                        <ContainedListItem>
                          {task.targets[0].text}
                        </ContainedListItem>
                      )}
                    </ContainedList>
                  ) : null}
                  <Tabs>
                    {/* Assumption: evaluation details and steps are shown as two nested tabs */}
                    <TabList aria-label="Evaluation details">
                      <Tab>Evaluations</Tab>
                      <Tab>Steps</Tab>
                    </TabList>
                    <TabPanels>
                      <TabPanel>
                        <Evaluation
                          evaluation={evaluation}
                          hMetrics={hMetrics}
                          aMetrics={aMetrics}
                        />
                      </TabPanel>
                      <TabPanel>
                        {/* Assumption: step information, if any, lives on the evaluation object */}
                        <Steps steps={evaluation.steps} />
                      </TabPanel>
                    </TabPanels>
                  </Tabs>
                </TabPanel>
              ))}
            </TabPanels>
          </Tabs>
        </>
      )}
    </>
  );
}