about summary refs log tree commit diff
path: root/src/client/views/nodes/chatbot/tools/TakeQuizTool.ts
diff options
context:
space:
mode:
Diffstat (limited to 'src/client/views/nodes/chatbot/tools/TakeQuizTool.ts')
-rw-r--r--src/client/views/nodes/chatbot/tools/TakeQuizTool.ts88
1 file changed, 88 insertions(+), 0 deletions(-)
diff --git a/src/client/views/nodes/chatbot/tools/TakeQuizTool.ts b/src/client/views/nodes/chatbot/tools/TakeQuizTool.ts
new file mode 100644
index 000000000..f025e95cd
--- /dev/null
+++ b/src/client/views/nodes/chatbot/tools/TakeQuizTool.ts
@@ -0,0 +1,88 @@
+import { BaseTool } from './BaseTool';
+import { Observation } from '../types/types';
+import { ParametersType, ToolInfo } from '../types/tool_types';
+import { AgentDocumentManager } from '../utils/AgentDocumentManager';
+import { GPTCallType, gptAPICall } from '../../../../apis/gpt/GPT';
+import { v4 as uuidv4 } from 'uuid';
+
+const parameterRules = [
+ {
+ name: 'userAnswer',
+ type: 'string',
+ description: 'User-provided answer to the quiz question.',
+ required: true,
+ },
+] as const;
+
+const toolInfo: ToolInfo<typeof parameterRules> = {
+ name: 'takeQuiz',
+ description:
+ 'Evaluates a user\'s answer for a randomly selected document using GPT, mirroring GPTPopup\'s quiz functionality.',
+ parameterRules,
+ citationRules: 'No citation needed for quiz operations.',
+};
+
+export class TakeQuizTool extends BaseTool<typeof parameterRules> {
+ private _docManager: AgentDocumentManager;
+
+ constructor(docManager: AgentDocumentManager) {
+ super(toolInfo);
+ this._docManager = docManager;
+ this._docManager.initializeFindDocsFreeform();
+ }
+
+ private async generateRubric(docId: string, description: string): Promise<string> {
+ const docMeta = this._docManager.extractDocumentMetadata(docId);
+ if (docMeta && docMeta.fields.layout.gptRubric) {
+ return docMeta.fields.layout.gptRubric;
+ } else {
+ const rubric = await gptAPICall(description, GPTCallType.MAKERUBRIC);
+ if (rubric) {
+ await this._docManager.editDocumentField(docId, 'layout.gptRubric', rubric);
+ }
+ return rubric || '';
+ }
+ }
+
+ async execute(args: ParametersType<typeof parameterRules>): Promise<Observation[]> {
+ const chunkId = uuidv4();
+
+ try {
+ const allDocIds = this._docManager.docIds;
+ const randomDocId = allDocIds[Math.floor(Math.random() * allDocIds.length)];
+ const docMeta = this._docManager.extractDocumentMetadata(randomDocId);
+
+ if (!docMeta) throw new Error('Randomly selected document metadata is undefined');
+
+ const description = docMeta.fields.layout.description.replace(/\n/g, ' ').trim();
+ const rubric = await this.generateRubric(randomDocId, description);
+
+ const prompt = `
+ Question: ${description};
+ UserAnswer: ${args.userAnswer};
+ Rubric: ${rubric}
+ `;
+
+ const evaluation = await gptAPICall(prompt, GPTCallType.QUIZDOC);
+
+ return [
+ {
+ type: 'text',
+ text: `<chunk chunk_id="${chunkId}" chunk_type="quiz_result">
+Evaluation result: ${evaluation || 'GPT provided no answer'}.
+Document evaluated: "${docMeta.title}"
+</chunk>`,
+ },
+ ];
+ } catch (err) {
+ return [
+ {
+ type: 'text',
+ text: `<chunk chunk_id="${chunkId}" chunk_type="error">
+Quiz evaluation failed: ${err instanceof Error ? err.message : err}
+</chunk>`,
+ },
+ ];
+ }
+ }
+}