Diffstat (limited to 'src')
 src/client/apis/gpt/customization.ts                                       | 214
 src/client/apis/gpt/setup.ts                                               |  30
 src/client/views/ExtractColors.ts                                          | 168
 src/client/views/PropertiesView.scss                                       |  15
 src/client/views/PropertiesView.tsx                                        | 114
 src/client/views/collections/collectionFreeForm/CollectionFreeFormView.tsx | 104
 src/client/views/global/globalScripts.ts                                   |   4
 src/client/views/nodes/formattedText/FormattedTextBox.tsx                  |  28
 src/client/views/nodes/trails/PresBox.scss                                 |  22
 src/client/views/nodes/trails/PresBox.tsx                                  | 171
 src/client/views/nodes/trails/PresElementBox.tsx                           |  15
 src/client/views/pdf/GPTPopup/GPTPopup.scss                                |   4
 src/client/views/pdf/GPTPopup/GPTPopup.tsx                                 |   4
 13 files changed, 884 insertions(+), 9 deletions(-)
diff --git a/src/client/apis/gpt/customization.ts b/src/client/apis/gpt/customization.ts
new file mode 100644
index 000000000..42f112f88
--- /dev/null
+++ b/src/client/apis/gpt/customization.ts
@@ -0,0 +1,214 @@
+import { ChatCompletionMessageParam } from 'openai/resources';
+import { openai } from './setup';
+
+export enum CustomizationType {
+ PRES_TRAIL_SLIDE = 'trails',
+}
+
+export interface GeneratedResponse {
+ collectionBackgroundColor: string;
+ documentsWithColors: DocumentWithColor[];
+}
+
+export interface DocumentWithColor {
+ id: number;
+ color: string;
+}
+
+export interface StyleInputDocument {
+ id: number;
+ textContent: string;
+ textSize: number;
+}
+
+export interface StyleInput {
+ collectionDescription: string;
+ documents: StyleInputDocument[];
+ imageColors: string[];
+}
+
+interface PromptInfo {
+ description: string;
+ features: { name: string; description: string; values?: string[] }[];
+}
+const prompts: { [key: string]: PromptInfo } = {
+ trails: {
+ description:
+ 'We are adding customization to a slide in a presentation. Given a natural language input, translate it into a json with the required fields: [title, presentation_transition, presentation_effect, config_zoom, presentation_effectDirection].',
+ features: [],
+ },
+};
+
+export const addCustomizationProperty = (type: CustomizationType, name: string, description: string, values?: string[]) => {
+ prompts[type].features.push(values ? { name, description, values } : { name, description });
+};
+
+const setupPresSlideCustomization = () => {
+ addCustomizationProperty(CustomizationType.PRES_TRAIL_SLIDE, 'title', 'is the title/name of the slide.');
+ addCustomizationProperty(CustomizationType.PRES_TRAIL_SLIDE, 'presentation_transition', 'is a number in milliseconds for how long it should take to transition/move to a slide.');
+ addCustomizationProperty(CustomizationType.PRES_TRAIL_SLIDE, 'presentation_effect', 'is an effect applied to the slide when we transition to it.', ['None', 'Fade in', 'Flip', 'Rotate', 'Bounce', 'Roll']);
+ addCustomizationProperty(CustomizationType.PRES_TRAIL_SLIDE, 'presentation_effectDirection', 'is what direction the effect is applied.', ['Enter from left', 'Enter from right', 'Enter from bottom', 'Enter from Top', 'Enter from center']);
+ addCustomizationProperty(CustomizationType.PRES_TRAIL_SLIDE, 'config_zoom', 'is a number from 0 to 1.0 indicating the percentage we should zoom into the slide.');
+};
+
+setupPresSlideCustomization();
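+
+// Hypothetical example: for the input "transition for 2 seconds with a fade in effect",
+// a well-formed completion would be
+// { "title": null, "presentation_transition": 2000, "presentation_effect": "Fade in",
+//   "presentation_effectDirection": null, "config_zoom": null }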
+
+export const gptTrailSlideCustomization = async (inputText: string) => {
+ let prompt = prompts.trails.description;
+
+ prompts.trails.features.forEach(feature => {
+ prompt += ` ${feature.name} ${feature.description}`;
+ if (feature.values) {
+ prompt += ` Its only possible values are [${feature.values.join(', ')}].`;
+ }
+ });
+
+ prompt += ' If the input does not contain info for a specific key, set its value to null. Please only return the json with these keys and their values.';
+
+ try {
+ const response = await openai.chat.completions.create({
+ model: 'gpt-3.5-turbo',
+ messages: [
+ { role: 'system', content: prompt },
+ { role: 'user', content: inputText },
+ ],
+ temperature: 0.1,
+ max_tokens: 1000,
+ });
+ return response.choices[0].message?.content;
+ } catch (err) {
+ console.log(err);
+ return 'Error connecting with API.';
+ }
+};
+
+// layout documents in a plain grid
+export const smartLayout = async (inputData: { width: number; height: number }[]) => {
+ const prompt =
+ 'I want to lay out documents in a 2d space in a nice, grid-like fashion with padding of about 50 units around each document, making sure they do not overlap. Given a json array of documents containing their widths and heights in this form: {width: number, height: number}[], give me a json array in this form: {x: number, y: number}[] corresponding to the documents with a nice layout. Return just the json array.';
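+
+ // Hypothetical example: [{ width: 300, height: 200 }, { width: 300, height: 200 }]
+ // might come back as [{ x: 0, y: 0 }, { x: 350, y: 0 }] (grid placement, roughly 50-unit gutters).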
+
+ const messages: ChatCompletionMessageParam[] = [
+ { role: 'system', content: prompt },
+ { role: 'user', content: JSON.stringify(inputData) },
+ ];
+
+ console.log('Prompt: ', prompt);
+ console.log('Messages: ', messages);
+
+ try {
+ const response = await openai.chat.completions.create({
+ model: 'gpt-4',
+ messages: messages,
+ temperature: 0.01,
+ max_tokens: 2000,
+ });
+ const content = response.choices[0].message?.content;
+ if (content) {
+ return content;
+ }
+ return 'Malformed response.';
+ } catch (err) {
+ console.log(err);
+ return 'Error connecting with API.';
+ }
+};
+
+// layout with semantic grouping
+export const smartLayout2 = async (inputData: { width: number; height: number; content: string }[]) => {
+ const prompt =
+ 'I want to lay out documents in a 2d space in an organized fashion with padding of about 50 units around each document, importantly making sure they do not overlap. I also want you to group documents by their content, with space separating semantic categories. Given a json array of documents containing their widths and heights in this form: {width: number, height: number, content: string}[], give me a json array in this form: {x: number, y: number}[] corresponding to the documents in the same order with a nice layout. Return just the json array.';
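+
+ // Hypothetical example: [{ width: 200, height: 100, content: 'cat photo' }, { width: 200, height: 100, content: 'dog essay' }]
+ // might come back as [{ x: 0, y: 0 }, { x: 0, y: 300 }]: same order, with extra space between semantic groups.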
+
+ const messages: ChatCompletionMessageParam[] = [
+ { role: 'system', content: prompt },
+ { role: 'user', content: JSON.stringify(inputData) },
+ ];
+
+ console.log('Prompt: ', prompt);
+ console.log('Messages: ', messages);
+
+ try {
+ const response = await openai.chat.completions.create({
+ model: 'gpt-4',
+ messages: messages,
+ temperature: 0.01,
+ max_tokens: 2000,
+ });
+ const content = response.choices[0].message?.content;
+ if (content) {
+ return content;
+ }
+ return 'Malformed response.';
+ } catch (err) {
+ console.log(err);
+ return 'Error connecting with API.';
+ }
+};
+
+// palette / styling
+export const generatePalette = async (inputData: StyleInput, useImageData: boolean, inputText: string, lastResponse?: GeneratedResponse[]) => {
+ let prompt = 'Dash is a hypermedia web application that allows users to organize documents of different media types into collections. The user wants you to come up with cohesive color palettes for a collection.';
+ prompt +=
+ ' The user is going to give you a json object of this format:' +
+ JSON.stringify({ collectionDescription: 'string', documents: 'Document[]', imageColors: 'string[]' }) +
+ '. The user may follow up with more specific instructions on what kind of palettes they want. collectionDescription is the title of the collection, which you should base the color palettes on. This is the document format:' +
+ JSON.stringify({
+ id: 'number',
+ textSize: 'number',
+ textContent: 'string',
+ }) +
+ (useImageData ? '. Finally, imageColors are the main hex colors of the images in the collection.' : '. Ignore imageColors.') +
+ ' You are going to generate three distinct variants of color palettes for the user to choose from based mostly on collectionDescription, and loosely on the text content and text size of the documents.' +
+ (useImageData ? ' You should slightly take imageColors into account, but primarily focus on crafting a palette that matches the text content.' : '') +
+ ' The variants should start with a light palette and grow increasingly more intense and vibrant. Return a json array of three objects in this format:' +
+ JSON.stringify({
+ collectionBackgroundColor: 'string',
+ documentsWithColors: 'DocumentWithColor[]',
+ }) +
+ '. collectionBackgroundColor should be a string hex value for the background color of the collection. documentsWithColors has the same length and order as the input documents. DocumentWithColor has this format:' +
+ JSON.stringify({
+ id: 'number',
+ color: 'string',
+ }) +
+ ", and each element’s color is based on the theme of the overall color palette and also by its document’s textContent. Please pay attention to aesthetics of how each document's color complement the background and each other and choose a variety of colors when appropriate.";
+
+ // enforce format
+ prompt += ' Important: Respond with only the JSON array and no other text.';
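+
+ // Hypothetical well-formed response for a two-document collection:
+ // [{ collectionBackgroundColor: '#f6f1e7',
+ //    documentsWithColors: [{ id: 0, color: '#d9c8a9' }, { id: 1, color: '#b9a27e' }] },
+ //  ...two more, increasingly vibrant variants]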
+
+ const messages: ChatCompletionMessageParam[] = [
+ { role: 'system', content: prompt },
+ { role: 'user', content: JSON.stringify(inputData) },
+ ];
+
+ // continuing conversation
+ if (lastResponse && inputText !== '') {
+ messages.push({ role: 'assistant', content: JSON.stringify(lastResponse) });
+ messages.push({ role: 'user', content: 'Please modify the previously generated palettes with the following: ' + inputText });
+ } else if (inputText !== '') {
+ messages.push({ role: 'user', content: inputText });
+ }
+
+ console.log('Prompt: ', prompt);
+ console.log('Messages: ', messages);
+
+ try {
+ const response = await openai.chat.completions.create({
+ model: 'gpt-4',
+ messages: messages,
+ temperature: 0.1,
+ max_tokens: 2000,
+ });
+ const content = response.choices[0].message?.content;
+ console.log(content);
+ if (content) {
+ return content;
+ }
+ return 'Malformed response.';
+ } catch (err) {
+ console.log(err);
+ return 'Error connecting with API.';
+ }
+};
diff --git a/src/client/apis/gpt/setup.ts b/src/client/apis/gpt/setup.ts
new file mode 100644
index 000000000..831c97eaa
--- /dev/null
+++ b/src/client/apis/gpt/setup.ts
@@ -0,0 +1,30 @@
+import { ClientOptions, OpenAI } from 'openai';
+
+export enum GPTCallType {
+ SUMMARY = 'summary',
+ COMPLETION = 'completion',
+ EDIT = 'edit',
+}
+
+export type GPTCallOpts = {
+ model: string;
+ maxTokens: number;
+ temp: number;
+ prompt: string;
+};
+
+export const callTypeMap: { [type: string]: GPTCallOpts } = {
+ summary: { model: 'text-davinci-003', maxTokens: 256, temp: 0.5, prompt: 'Summarize this text in simpler terms: ' },
+ edit: { model: 'text-davinci-003', maxTokens: 256, temp: 0.5, prompt: 'Reword this: ' },
+ completion: { model: 'text-davinci-003', maxTokens: 256, temp: 0.5, prompt: '' },
+};
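+
+// A minimal sketch of how these options might be consumed (the real call helper
+// lives elsewhere; names here are illustrative only):
+//   const opts = callTypeMap[GPTCallType.SUMMARY];
+//   const res = await openai.completions.create({ model: opts.model, max_tokens: opts.maxTokens, temperature: opts.temp, prompt: opts.prompt + text });
+//   console.log(res.choices[0].text);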
+
+const configuration: ClientOptions = {
+ apiKey: process.env.OPENAI_KEY,
+ dangerouslyAllowBrowser: true,
+};
+
+export const openai = new OpenAI(configuration);
diff --git a/src/client/views/ExtractColors.ts b/src/client/views/ExtractColors.ts
new file mode 100644
index 000000000..f6928c52a
--- /dev/null
+++ b/src/client/views/ExtractColors.ts
@@ -0,0 +1,168 @@
+import { extractColors } from 'extract-colors';
+import { FinalColor } from 'extract-colors/lib/types/Color';
+
+// Manages image color extraction
+export class ExtractColors {
+ // loads all images into img elements
+ static loadImages = async (imageFiles: File[]): Promise<HTMLImageElement[]> => {
+ try {
+ const imageElements = await Promise.all(imageFiles.map(file => this.loadImage(file)));
+ return imageElements;
+ } catch (error) {
+ console.error(error);
+ return [];
+ }
+ };
+
+ // loads a single img into an img element
+ static loadImage = (file: File): Promise<HTMLImageElement> => {
+ return new Promise((resolve, reject) => {
+ const img = new Image();
+
+ img.onload = () => resolve(img);
+ img.onerror = error => reject(error);
+
+ const url = URL.createObjectURL(file);
+ img.src = url;
+ });
+ };
+
+ // loads all images into img elements
+ static loadImagesUrl = async (imageUrls: string[]): Promise<HTMLImageElement[]> => {
+ try {
+ const imageElements = await Promise.all(imageUrls.map(url => this.loadImageUrl(url)));
+ return imageElements;
+ } catch (error) {
+ console.error(error);
+ return [];
+ }
+ };
+
+ // loads a single img into an img element
+ static loadImageUrl = (url: string): Promise<HTMLImageElement> => {
+ return new Promise((resolve, reject) => {
+ const img = new Image();
+
+ img.onload = () => resolve(img);
+ img.onerror = error => reject(error);
+
+ img.src = url;
+ });
+ };
+
+ // extracts a list of colors from an img element
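+ // Usage sketch (assumes a CORS-accessible image URL):
+ //   const img = await ExtractColors.loadImageUrl('/assets/photo.png');
+ //   const palette = await ExtractColors.getImgColors(img); // FinalColor[]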
+ static getImgColors = async (img: HTMLImageElement) => {
+ const colors = await extractColors(img, { distance: 0.35 });
+ return colors;
+ };
+
+ static simpleSort = (colors: FinalColor[]): FinalColor[] => {
+ colors.sort((a, b) => {
+ if (a.hue !== b.hue) {
+ return b.hue - a.hue;
+ } else {
+ return b.saturation - a.saturation;
+ }
+ });
+ return colors;
+ };
+
+ static sortColors(colors: FinalColor[]): FinalColor[] {
+ // Convert color from RGB to CIELAB format (approximate: skips sRGB gamma
+ // correction and white-point normalization, which is fine for relative sorting)
+ const convertToLab = (color: FinalColor): number[] => {
+ const r = color.red / 255;
+ const g = color.green / 255;
+ const b = color.blue / 255;
+
+ const x = r * 0.4124564 + g * 0.3575761 + b * 0.1804375;
+ const y = r * 0.2126729 + g * 0.7151522 + b * 0.072175;
+ const z = r * 0.0193339 + g * 0.119192 + b * 0.9503041;
+
+ const pivot = 0.008856;
+ const factor = 903.3;
+
+ const fx = x > pivot ? Math.cbrt(x) : (factor * x + 16) / 116;
+ const fy = y > pivot ? Math.cbrt(y) : (factor * y + 16) / 116;
+ const fz = z > pivot ? Math.cbrt(z) : (factor * z + 16) / 116;
+
+ const L = 116 * fy - 16;
+ const a = (fx - fy) * 500;
+ const b1 = (fy - fz) * 200;
+
+ return [L, a, b1];
+ };
+
+ // Sort colors by their CIELAB distance from black so neighboring colors are
+ // perceptually close. (The pairwise distance between a and b is symmetric, so
+ // comparing it against itself would always yield 0 and never reorder anything.)
+ colors.sort((colorA, colorB) => {
+ const labA = convertToLab(colorA);
+ const labB = convertToLab(colorB);
+
+ const distanceA = Math.sqrt(labA[0] * labA[0] + labA[1] * labA[1] + labA[2] * labA[2]);
+ const distanceB = Math.sqrt(labB[0] * labB[0] + labB[1] * labB[1] + labB[2] * labB[2]);
+
+ return distanceA - distanceB; // sort by CIELAB distance from the origin
+ });
+
+ return colors;
+ }
+
+ static hexToFinalColor = (hex: string): FinalColor => {
+ const rgb = /^#?([a-f\d]{2})([a-f\d]{2})([a-f\d]{2})$/i.exec(hex);
+
+ if (!rgb) {
+ throw new Error('Invalid hex color format.');
+ }
+
+ const red = parseInt(rgb[1], 16);
+ const green = parseInt(rgb[2], 16);
+ const blue = parseInt(rgb[3], 16);
+
+ // Normalize to [0, 1] before computing HSL; the formulas below assume
+ // normalized channels (with raw 0-255 values, saturation degenerates).
+ const r = red / 255;
+ const g = green / 255;
+ const b = blue / 255;
+
+ const max = Math.max(r, g, b);
+ const min = Math.min(r, g, b);
+ const delta = max - min;
+ const lightness = (max + min) / 2;
+ const intensity = lightness;
+
+ let hue = 0;
+ let saturation = 0;
+
+ if (delta !== 0) {
+ saturation = delta / (1 - Math.abs(2 * lightness - 1));
+ if (max === r) {
+ hue = (60 * ((g - b) / delta) + 360) % 360;
+ } else if (max === g) {
+ hue = (60 * ((b - r) / delta) + 120) % 360;
+ } else {
+ hue = (60 * ((r - g) / delta) + 240) % 360;
+ }
+ }
+
+ return {
+ hex,
+ red,
+ green,
+ blue,
+ area: delta, // placeholder: extract-colors uses area for pixel coverage, which a lone hex value does not have
+ hue,
+ saturation,
+ lightness,
+ intensity,
+ };
+ };
+}
+
+// for reference
+
+// type FinalColor = {
+// hex: string;
+// red: number;
+// green: number;
+// blue: number;
+// area: number;
+// hue: number;
+// saturation: number;
+// lightness: number;
+// intensity: number;
+// }
diff --git a/src/client/views/PropertiesView.scss b/src/client/views/PropertiesView.scss
index 8581bdf73..b21828aa7 100644
--- a/src/client/views/PropertiesView.scss
+++ b/src/client/views/PropertiesView.scss
@@ -7,6 +7,21 @@
position: absolute;
right: 4;
}
+.propertiesView-palette {
+ cursor: pointer;
+ padding: 8px;
+ border-radius: 4px;
+ transition: all 0.2s ease;
+ &:hover {
+ background-color: #3b3c3e;
+ }
+}
+.styling-chatbox {
+ color: #000000;
+ width: 100%;
+ outline: none;
+ border: none;
+}
.propertiesView {
height: 100%;
width: 250;
diff --git a/src/client/views/PropertiesView.tsx b/src/client/views/PropertiesView.tsx
index e4e7bec32..c3fdee001 100644
--- a/src/client/views/PropertiesView.tsx
+++ b/src/client/views/PropertiesView.tsx
@@ -2,7 +2,7 @@ import { IconLookup } from '@fortawesome/fontawesome-svg-core';
import { faAnchor, faArrowRight, faWindowMaximize } from '@fortawesome/free-solid-svg-icons';
import { FontAwesomeIcon } from '@fortawesome/react-fontawesome';
import { Checkbox, Tooltip } from '@mui/material';
-import { Colors, EditableText, IconButton, NumberInput, Size, Slider, Type } from 'browndash-components';
+import { Button, ColorPicker, Colors, EditableText, IconButton, NumberInput, Size, Slider, Type } from 'browndash-components';
import { concat } from 'lodash';
import { IReactionDisposer, action, computed, makeObservable, observable, reaction } from 'mobx';
import { observer } from 'mobx-react';
@@ -41,6 +41,11 @@ import { DocumentView, OpenWhere } from './nodes/DocumentView';
import { StyleProviderFuncType } from './nodes/FieldView';
import { KeyValueBox } from './nodes/KeyValueBox';
import { PresBox, PresEffect, PresEffectDirection } from './nodes/trails';
+import { ExtractColors } from './ExtractColors';
+import TextareaAutosize from 'react-textarea-autosize';
+import { GeneratedResponse, StyleInput, generatePalette } from '../apis/gpt/customization';
+import { FaFillDrip } from 'react-icons/fa';
+
import { LinkBox } from './nodes/LinkBox';
const _global = (window /* browser */ || global) /* node */ as any;
@@ -101,6 +106,58 @@ export class PropertiesView extends ObservableReactComponent<PropertiesViewProps
@observable openAppearance: boolean = true;
@observable openTransform: boolean = true;
@observable openFilters: boolean = false;
+ @observable openStyling: boolean = true;
+
+ // GPT styling
+ public styleInput: StyleInput | undefined;
+ @observable loadingStyles: boolean = false;
+ @observable generatedStyles: GeneratedResponse[] = [];
+ @observable inputDocs: Doc[] = [];
+ @observable selectedStyle: number = -1;
+ @observable useImageData = false;
+
+ @observable chatInput: string = '';
+
+ @action
+ setChatInput = (input: string) => {
+ this.chatInput = input;
+ };
+
+ @action
+ setLoading = (loading: boolean) => {
+ this.loadingStyles = loading;
+ };
+
+ @action
+ gptStyling = async () => {
+ // this.generatedStyles = [];
+ this.selectedStyle = -1;
+ this.setLoading(true);
+ console.log('Style input: ', this.styleInput);
+
+ if (!this.styleInput) return;
+
+ try {
+ // pass the previously generated palettes (if any) so the model can iterate on them
+ const res = await generatePalette(this.styleInput, this.useImageData, this.chatInput, this.generatedStyles.length ? this.generatedStyles : undefined);
+ console.log('Generated palettes: ', res);
+ const resObj = JSON.parse(res) as GeneratedResponse[];
+ this.setGeneratedStyles(resObj);
+ } catch (err) {
+ console.error(err);
+ }
+ this.setLoading(false);
+ };
+
+ @action
+ setGeneratedStyles = (responses: GeneratedResponse[]) => (this.generatedStyles = responses);
+
+ @action
+ setInputDocs = (docs: Doc[]) => (this.inputDocs = docs);
//Pres Trails booleans:
@observable openPresTransitions: boolean = true;
@@ -1139,6 +1196,60 @@ export class PropertiesView extends ObservableReactComponent<PropertiesViewProps
}
};
+ @action
+ styleCollection = (i: number) => {
+ this.selectedStyle = i;
+ const resObj = this.generatedStyles[i];
+ if (this.selectedDoc && this.selectedDoc.type === 'collection') {
+ this.selectedDoc.backgroundColor = resObj.collectionBackgroundColor;
+ // each returned color maps back to the input doc at the same index
+ resObj.documentsWithColors.forEach((elem, j) => (this.inputDocs[j].backgroundColor = elem.color));
+ }
+ };
+
+ // GPT styling
+ @computed get stylingSubMenu() {
+ return (
+ <PropertiesSection title="Styling" isOpen={this.openStyling} setIsOpen={bool => (this.openStyling = bool)} onDoubleClick={() => this.CloseAll()}>
+ <div className="propertiesView-content" style={{ position: 'relative', height: 'auto', display: 'flex', flexDirection: 'column', alignItems: 'center', gap: '4px' }}>
+ {this.generatedStyles.length > 0 &&
+ this.generatedStyles.map((style, i) => (
+ <div
+ key={i}
+ className="propertiesView-palette"
+ style={{ display: 'flex', gap: '4px', backgroundColor: this.selectedStyle === i ? StrCast(Doc.UserDoc().userVariantColor) : '#00000000' }}
+ onClick={() => this.styleCollection(i)}>
+ <div style={{ width: '24px', height: '24px', backgroundColor: style.collectionBackgroundColor, borderRadius: '2px' }}></div>
+ {ExtractColors.sortColors(style.documentsWithColors.map(doc => ExtractColors.hexToFinalColor(doc.color))).map((c, i) => (
+ <div key={i} style={{ width: '24px', height: '24px', backgroundColor: c.hex, borderRadius: '2px' }}></div>
+ ))}
+ </div>
+ ))}
+ {this.loadingStyles && 'Generating styles...'}
+ <TextareaAutosize
+ minRows={3}
+ placeholder="Customize..."
+ className="styling-chatbox"
+ autoFocus={true}
+ value={this.chatInput}
+ onChange={e => {
+ this.setChatInput(e.target.value);
+ }}
+ onKeyDown={e => {
+ e.stopPropagation();
+ }}
+ />
+ <div style={{ display: 'flex', justifyContent: 'flex-end', gap: '16px' }}>
+ <div style={{ display: 'flex', gap: '4px', alignItems: 'center' }}>
+ <label style={{ margin: '0px' }}>Use Images </label>
+ <input style={{ margin: '0px' }} type="checkbox" checked={this.useImageData} onChange={action(e => (this.useImageData = e.target.checked))} />
+ </div>
+ <Button text={'Regenerate'} type={Type.TERT} color={StrCast(Doc.UserDoc().userVariantColor)} onClick={this.gptStyling} />
+ </div>
+ </div>
+ </PropertiesSection>
+ );
+ }
+
@computed get filtersSubMenu() {
return (
<PropertiesSection title="Filters" isOpen={this.openFilters} setIsOpen={bool => (this.openFilters = bool)} onDoubleClick={() => this.CloseAll()}>
@@ -1645,6 +1756,7 @@ export class PropertiesView extends ObservableReactComponent<PropertiesViewProps
<div className="propertiesView-name">{this.editableTitle}</div>
<div className="propertiesView-type"> {this.currentType} </div>
+ {this.stylingSubMenu}
{this.fieldsSubMenu}
{this.optionsSubMenu}
{this.linksSubMenu}
diff --git a/src/client/views/collections/collectionFreeForm/CollectionFreeFormView.tsx b/src/client/views/collections/collectionFreeForm/CollectionFreeFormView.tsx
index a69030019..1e0840495 100644
--- a/src/client/views/collections/collectionFreeForm/CollectionFreeFormView.tsx
+++ b/src/client/views/collections/collectionFreeForm/CollectionFreeFormView.tsx
@@ -27,7 +27,7 @@ import { ReplayMovements } from '../../../util/ReplayMovements';
import { CompileScript } from '../../../util/Scripting';
import { ScriptingGlobals } from '../../../util/ScriptingGlobals';
import { SelectionManager } from '../../../util/SelectionManager';
-import { freeformScrollMode } from '../../../util/SettingsManager';
+import { freeformScrollMode, SettingsManager } from '../../../util/SettingsManager';
import { SnappingManager } from '../../../util/SnappingManager';
import { Transform } from '../../../util/Transform';
import { undoBatch, UndoManager } from '../../../util/UndoManager';
@@ -54,6 +54,11 @@ import { CollectionFreeFormPannableContents } from './CollectionFreeFormPannable
import { CollectionFreeFormRemoteCursors } from './CollectionFreeFormRemoteCursors';
import './CollectionFreeFormView.scss';
import { MarqueeView } from './MarqueeView';
+import { PropertiesView } from '../../PropertiesView';
+import { ExtractColors } from '../../ExtractColors';
+import { smartLayout2, StyleInputDocument } from '../../../apis/gpt/customization';
+import { RichTextField } from '../../../../fields/RichTextField';
+import { extname } from 'path';
export interface collectionFreeformViewProps {
NativeWidth?: () => number;
@@ -1621,6 +1626,100 @@ export class CollectionFreeFormView extends CollectionSubView<Partial<collection
}
};
+ printDoc = (doc: Doc) => {
+ console.log('Printing keys');
+ Object.keys(doc).forEach(key => {
+ console.log(key, ':', doc[key]);
+ });
+ };
+
+ @action
+ openProperties = () => {
+ SettingsManager.Instance.propertiesWidth = 300;
+ };
+
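+ // Maps an image URL to its medium-resolution variant (the '_m' suffix presumably
+ // selects a server-generated medium-size rendition, e.g. img.png -> img_m.png),
+ // proxying cross-origin URLs and substituting a placeholder icon for non-image files.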
+ choosePath(url: URL) {
+ if (!url?.href) return '';
+ const lower = url.href.toLowerCase();
+ if (url.protocol === 'data:') return url.href; // note: URL.protocol includes the trailing colon
+ if (url.href.indexOf(window.location.origin) === -1 && url.href.indexOf('dashblobstore') === -1) return Utils.CorsProxy(url.href);
+ if (!/\.(png|jpg|jpeg|gif|webp)$/.test(lower)) return `/assets/unknown-file-icon-hi.png`;
+
+ const ext = extname(url.href);
+ return url.href.replace(ext, '_m' + ext);
+ }
+
+ // gpt layout
+ @action
+ gptLayout = async () => {
+ const docLayouts = this.childDocs.map(doc => ({
+ width: NumCast(doc.width),
+ height: NumCast(doc.height),
+ content: StrCast(doc.title),
+ }));
+ console.log(docLayouts);
+
+ const res = await smartLayout2(docLayouts);
+ console.log('Smart layout result', res);
+ try {
+ const resObj = JSON.parse(res) as { x: number; y: number }[];
+ resObj.forEach((elem, i) => {
+ if (!this.childDocs[i]) return;
+ this.childDocs[i].x = elem.x;
+ this.childDocs[i].y = elem.y;
+ });
+ } catch (err) {
+ console.error('Smart layout returned malformed JSON:', res, err);
+ return;
+ }
+
+ // refit collection
+ setTimeout(() => {
+ this.fitContentOnce();
+ }, 500);
+ };
+
+ // gpt styling
+ @action
+ gptStyling = async () => {
+ // clear it in properties instead
+ if (!PropertiesView.Instance) return;
+ this.openProperties();
+ PropertiesView.Instance.setGeneratedStyles([]);
+ PropertiesView.Instance.selectedStyle = -1;
+ PropertiesView.Instance.useImageData = false;
+
+ console.log('Title', this.Document.title);
+ const inputDocs = this.childDocs.filter(doc => doc.type === 'rich text');
+ const imgDocs = this.childDocs.filter(doc => doc.type === 'image');
+ const imgUrls = imgDocs.map(doc => this.choosePath((doc.data as ImageField).url));
+
+ const imageElements = await ExtractColors.loadImagesUrl(imgUrls);
+ const colors = await Promise.all(imageElements.map(elem => ExtractColors.getImgColors(elem)));
+ const colorHexes = colors
+ .reduce((acc, curr) => acc.concat(curr), [])
+ .map(color => color.hex)
+ .slice(0, 10);
+ console.log('Hexes', colorHexes);
+
+ PropertiesView.Instance?.setInputDocs(inputDocs);
+
+ // build the per-document text inputs; id doubles as the index back into
+ // inputDocs, and textSize is a fixed default for now
+ const gptInput: StyleInputDocument[] = inputDocs.map((doc, i) => ({
+ id: i,
+ textContent: (doc.text as RichTextField)?.Text,
+ textSize: 16,
+ }));
+
+ const collectionDescription = StrCast(this.Document.title);
+
+ const styleInput = {
+ collectionDescription,
+ documents: gptInput,
+ imageColors: colorHexes,
+ };
+
+ PropertiesView.Instance.styleInput = styleInput;
+ PropertiesView.Instance.gptStyling();
+ };
+
onContextMenu = (e: React.MouseEvent) => {
if (this._props.isAnnotationOverlay || !ContextMenu.Instance) return;
@@ -1650,6 +1749,9 @@ export class CollectionFreeFormView extends CollectionSubView<Partial<collection
optionItems.push({ description: (this._showAnimTimeline ? 'Close' : 'Open') + ' Animation Timeline', event: action(() => (this._showAnimTimeline = !this._showAnimTimeline)), icon: 'eye' });
this._props.renderDepth && optionItems.push({ description: 'Use Background Color as Default', event: () => (Cast(Doc.UserDoc().emptyCollection, Doc, null)._backgroundColor = StrCast(this.layoutDoc._backgroundColor)), icon: 'palette' });
this._props.renderDepth && optionItems.push({ description: 'Fit Content Once', event: this.fitContentOnce, icon: 'object-group' });
+ // Want to condense into a Smart Organize button
+ this._props.renderDepth && optionItems.push({ description: 'Style with AI', event: this.gptStyling, icon: 'paint-brush' });
+ this._props.renderDepth && optionItems.push({ description: 'Smart Layout', event: this.gptLayout, icon: 'object-group' });
if (!Doc.noviceMode) {
optionItems.push({ description: (!Doc.NativeWidth(this.layoutDoc) || !Doc.NativeHeight(this.layoutDoc) ? 'Freeze' : 'Unfreeze') + ' Aspect', event: this.toggleNativeDimensions, icon: 'snowflake' });
}
diff --git a/src/client/views/global/globalScripts.ts b/src/client/views/global/globalScripts.ts
index 51672513b..4531c8296 100644
--- a/src/client/views/global/globalScripts.ts
+++ b/src/client/views/global/globalScripts.ts
@@ -32,6 +32,10 @@ ScriptingGlobals.add(function setView(view: string) {
selected ? (selected._type_collection = view) : console.log('[FontIconBox.tsx] changeView failed');
});
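+// Presumably set while a background-color edit is in progress so that
+// FormattedTextBox.checkBackgroundColor skips auto-adjusting the font color mid-edit.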
+ScriptingGlobals.add(function setSettingBgColor(isSetting: boolean) {
+ Doc.UserDoc().settingBgColor = isSetting;
+});
+
// toggle: Set overlay status of selected document
ScriptingGlobals.add(function setBackgroundColor(color?: string, checkResult?: boolean) {
const selectedViews = SelectionManager.Views;
diff --git a/src/client/views/nodes/formattedText/FormattedTextBox.tsx b/src/client/views/nodes/formattedText/FormattedTextBox.tsx
index 56008de8e..40acf164f 100644
--- a/src/client/views/nodes/formattedText/FormattedTextBox.tsx
+++ b/src/client/views/nodes/formattedText/FormattedTextBox.tsx
@@ -67,6 +67,7 @@ import { RichTextMenu, RichTextMenuPlugin } from './RichTextMenu';
import { RichTextRules } from './RichTextRules';
import { schema } from './schema_rts';
import { SummaryView } from './SummaryView';
+import { isDarkMode } from '../../../util/reportManager/reportManagerUtils';
import Select from 'react-select';
// import * as applyDevTools from 'prosemirror-dev-tools';
@observer
@@ -985,13 +986,12 @@ export class FormattedTextBox extends ViewBoxAnnotatableComponent<FieldViewProps
try {
let res = await gptAPICall((this.dataDoc.text as RichTextField)?.Text, GPTCallType.COMPLETION);
if (!res) {
- console.error('GPT call failed');
this.animateRes(0, 'Something went wrong.');
} else {
this.animateRes(0, res);
}
} catch (err) {
- console.error('GPT call failed');
+ console.error(err);
this.animateRes(0, 'Something went wrong.');
}
});
@@ -1005,6 +1005,7 @@ export class FormattedTextBox extends ViewBoxAnnotatableComponent<FieldViewProps
};
breakupDictation = () => {
if (this._editorView && this._recordingDictation) {
this.stopDictation(true);
this._break = true;
@@ -1187,6 +1188,25 @@ export class FormattedTextBox extends ViewBoxAnnotatableComponent<FieldViewProps
@computed get contentScaling() {
return Doc.NativeAspect(this.Document, this.dataDoc, false) ? this._props.NativeDimScaling?.() || 1 : 1;
}
+
+ @action
+ checkBackgroundColor() {
+ // skip while the user is actively setting a background color
+ if (BoolCast(Doc.UserDoc().settingBgColor)) return;
+ // pick a font color that stays legible against the new background
+ const fontColor = isDarkMode(StrCast(this.Document._backgroundColor)) ? '#ffffff' : '#000000';
+ if (!this._editorView) return;
+ const tr = this._editorView.state.tr;
+
+ tr.setSelection(TextSelection.create(tr.doc, 0, tr.doc.content.size));
+ tr.addMark(0, tr.doc.content.size, schema.marks.pFontColor.create({ color: fontColor }));
+ this._editorView.dispatch(tr);
+ }
+
componentDidMount() {
!this._props.dontSelectOnLoad && this._props.setContentViewBox?.(this); // this tells the DocumentView that this AudioBox is the "content" of the document. this allows the DocumentView to indirectly call getAnchor() on the AudioBox when making a link.
this._cachedLinks = LinkManager.Links(this.Document);
@@ -1209,6 +1229,10 @@ export class FormattedTextBox extends ViewBoxAnnotatableComponent<FieldViewProps
({ width, scrollHeight, layout_autoHeight }) => width && layout_autoHeight && this.resetNativeHeight(scrollHeight),
{ fireImmediately: true }
);
+ this._disposers.bgColor = reaction(
+ // re-apply a legible font color whenever the background color changes
+ () => this.Document._backgroundColor,
+ () => this.checkBackgroundColor()
+ );
this._disposers.componentHeights = reaction(
// set the document height when one of the component heights changes and layout_autoHeight is on
() => ({ sidebarHeight: this.sidebarHeight, textHeight: this.textHeight, layout_autoHeight: this.layout_autoHeight, marginsHeight: this.layout_autoHeightMargins }),
diff --git a/src/client/views/nodes/trails/PresBox.scss b/src/client/views/nodes/trails/PresBox.scss
index 3b34a1f90..1537ad0b8 100644
--- a/src/client/views/nodes/trails/PresBox.scss
+++ b/src/client/views/nodes/trails/PresBox.scss
@@ -15,6 +15,28 @@
//overflow: hidden;
transition: 0.7s opacity ease;
+ .presBox-chatbox {
+ position: fixed;
+ bottom: 8px;
+ left: 8px;
+ width: calc(100% - 16px);
+ min-height: 100px;
+ border-radius: 16px;
+ padding: 16px;
+ gap: 8px;
+ z-index: 999;
+ display: flex;
+ flex-direction: column;
+ justify-content: space-between;
+ background-color: #ffffff;
+ box-shadow: 0 2px 5px #7474748d;
+
+ .pres-chatbox {
+ outline: none;
+ border: none;
+ }
+ }
+
.presBox-listCont {
position: relative;
height: calc(100% - 67px);
diff --git a/src/client/views/nodes/trails/PresBox.tsx b/src/client/views/nodes/trails/PresBox.tsx
index b8f6575dd..1b1b65e46 100644
--- a/src/client/views/nodes/trails/PresBox.tsx
+++ b/src/client/views/nodes/trails/PresBox.tsx
@@ -35,7 +35,14 @@ import { DocumentView, OpenWhere, OpenWhereMod } from '../DocumentView';
import { FocusViewOptions, FieldView, FieldViewProps } from '../FieldView';
import { ScriptingBox } from '../ScriptingBox';
import './PresBox.scss';
+import ReactLoading from 'react-loading';
import { PresEffect, PresEffectDirection, PresMovement, PresStatus } from './PresEnums';
+import ReactTextareaAutosize from 'react-textarea-autosize';
+import { IconButton, Type } from 'browndash-components';
+import { BiMicrophone, BiX } from 'react-icons/bi';
+import { AiOutlineSend } from 'react-icons/ai';
+import { gptTrailSlideCustomization } from '../../../apis/gpt/customization';
+import { DictationManager } from '../../../util/DictationManager';
export interface pinDataTypes {
scrollable?: boolean;
dataviz?: number[];
@@ -103,7 +110,37 @@ export class PresBox extends ViewBoxBaseComponent<FieldViewProps>() {
@observable _treeViewMap: Map<Doc, number> = new Map();
@observable _presKeyEvents: boolean = false;
@observable _forceKeyEvents: boolean = false;
- @computed get isTreeOrStack() {
+
+ // GPT
+ private _inputref: HTMLTextAreaElement | null = null;
+ @observable chatActive: boolean = false;
+ @observable chatInput: string = '';
+ public slideToModify: Doc | null = null;
+ @observable isRecording: boolean = false;
+ @observable isLoading: boolean = false;
+
+ @action
+ setChatInput = (input: string) => {
+ this.chatInput = input;
+ };
+
+ @action
+ setIsLoading = (isLoading: boolean) => {
+ this.isLoading = isLoading;
+ };
+
+ @action
+ public setChatActive = (active: boolean) => {
+ this.chatActive = active;
+ };
+
+ @action
+ public setIsRecording = (isRecording: boolean) => {
+ this.isRecording = isRecording;
+ };
+
+ @computed
+ get isTreeOrStack() {
return [CollectionViewType.Tree, CollectionViewType.Stacking].includes(StrCast(this.layoutDoc._type_collection) as any);
}
@computed get isTree() {
@@ -226,6 +263,73 @@ export class PresBox extends ViewBoxBaseComponent<FieldViewProps>() {
}
};
+ // GPT
+
+ recordDictation = () => {
+ this.setIsRecording(true);
+ this.setChatInput('');
+ DictationManager.Controls.listen({
+ interimHandler: this.setDictationContent,
+ continuous: { indefinite: false },
+ }).then(results => {
+ if (results === DictationManager.Controls.Infringed) {
+ DictationManager.Controls.stop();
+ }
+ });
+ };
+ stopDictation = (abort: boolean) => {
+ this.setIsRecording(false);
+ DictationManager.Controls.stop(!abort);
+ };
+
+ setDictationContent = (value: string) => {
+ this.setChatInput(value);
+ };
+
+ @action
+ customizeWithGPT = async (input: string) => {
+ // e.g. 'change title to Customized Slide, transition for 2.3s with fade in effect'
+ const slide = this.slideToModify;
+ if (!slide) return;
+ this.setIsRecording(false);
+ this.setIsLoading(true);
+ try {
+ const res = await gptTrailSlideCustomization(input);
+ console.log('slide result', res);
+ if (typeof res === 'string') {
+ const resObj = JSON.parse(res);
+ // apply every non-null property the model returned to the active slide
+ for (const key in resObj) {
+ if (resObj[key] !== null && resObj[key] !== undefined) {
+ slide[key] = resObj[key];
+ }
+ }
+ }
+ } catch (err) {
+ console.error(err);
+ }
+ this.setIsLoading(false);
+ };
+
//TODO: al: it seems currently that tempMedia doesn't stop onslidechange after clicking the button; the time the tempmedia stop depends on the start & end time
// TODO: to handle child slides (entering into subtrail and exiting), also the next() and back() functions
// No more frames in current doc and next slide is defined, therefore move to next slide
@@ -734,6 +838,9 @@ export class PresBox extends ViewBoxBaseComponent<FieldViewProps>() {
*/
navigateToActiveItem = (afterNav?: () => void) => {
const activeItem: Doc = this.activeItem;
+ // GPT: remember the active slide so chat-based customization can modify it
+ this.slideToModify = activeItem;
const targetDoc: Doc = this.targetDoc;
const finished = () => {
afterNav?.();
@@ -1191,6 +1298,7 @@ export class PresBox extends ViewBoxBaseComponent<FieldViewProps>() {
@action
keyEvents = (e: KeyboardEvent) => {
if (e.target instanceof HTMLInputElement) return;
+ if (e.target instanceof HTMLTextAreaElement) return;
let handled = false;
const anchorNode = document.activeElement as HTMLDivElement;
if (anchorNode && anchorNode.className?.includes('lm_title')) return;
@@ -2625,7 +2733,6 @@ export class PresBox extends ViewBoxBaseComponent<FieldViewProps>() {
/>
) : null}
</div>
-
{/* {
// if the document type is a presentation, then the collection stacking view has a "+ new slide" button at the bottom of the stack
<Tooltip title={<div className="dash-tooltip">{'Click on document to pin to presentation or make a marquee selection to pin your desired view'}</div>}>
@@ -2635,6 +2742,66 @@ export class PresBox extends ViewBoxBaseComponent<FieldViewProps>() {
</Tooltip>
} */}
</div>
+ {/* presbox chatbox */}
+ {this.chatActive && (
+ <div className="presBox-chatbox">
+ <div style={{ alignSelf: 'flex-end' }}>
+ {this.isLoading ? (
+ <ReactLoading type="spin" color={StrCast(Doc.UserDoc().userVariantColor)} width={14} height={14} />
+ ) : (
+ <IconButton
+ type={Type.PRIM}
+ color={StrCast(Doc.UserDoc().userVariantColor)}
+ tooltip="Close"
+ icon={<BiX size={'16px'} />}
+ onClick={() => {
+ this.setChatActive(false);
+ }}
+ />
+ )}
+ </div>
+ <ReactTextareaAutosize
+ ref={r => (this._inputref = r)}
+ minRows={3}
+ placeholder="Customize..."
+ className="pres-chatbox"
+ autoFocus={true}
+ value={this.chatInput}
+ onChange={e => {
+ this.setChatInput(e.target.value);
+ }}
+ onKeyDown={e => {
+ this.stopDictation(true);
+ e.stopPropagation();
+ }}
+ />
+ {/* <input className="chatbox" placeholder="Customize..." value={this.chatInput} onChange={e => this.setChatInput(e.target.value)} /> */}
+ <div style={{ alignSelf: 'flex-end', display: 'flex', gap: '8px' }}>
+ <IconButton
+ type={Type.TERT}
+ color={this.isRecording ? 'red' : StrCast(Doc.UserDoc().userVariantColor)}
+ tooltip="Record"
+ icon={<BiMicrophone size={'16px'} />}
+ onClick={() => {
+ if (!this.isRecording) {
+ this.recordDictation();
+ } else {
+ this.stopDictation(true);
+ }
+ }}
+ />
+ <IconButton
+ type={Type.TERT}
+ color={!this.isLoading ? StrCast(Doc.UserDoc().userVariantColor) : '#7c7c7c'}
+ tooltip="Send"
+ icon={<AiOutlineSend size={'16px'} />}
+ onClick={() => {
+ if (!this.isLoading) this.customizeWithGPT(this.chatInput);
+ }}
+ />
+ </div>
+ </div>
+ )}
</div>
);
}
diff --git a/src/client/views/nodes/trails/PresElementBox.tsx b/src/client/views/nodes/trails/PresElementBox.tsx
index 5b2aa1cde..ed2f25fb6 100644
--- a/src/client/views/nodes/trails/PresElementBox.tsx
+++ b/src/client/views/nodes/trails/PresElementBox.tsx
@@ -409,6 +409,8 @@ export class PresElementBox extends ViewBoxBaseComponent<FieldViewProps>() {
return presBoxDocView ? presBoxDocView._props.PanelWidth() : width ? width : 300;
}
@computed get presButtons() {
const presBox = this.presBox;
const presBoxColor = StrCast(presBox?._backgroundColor);
@@ -512,6 +514,19 @@ export class PresElementBox extends ViewBoxBaseComponent<FieldViewProps>() {
</div>
</Tooltip>
);
+ items.push(
+ <Tooltip key="customize" title={<div className="dash-tooltip">Customize</div>}>
+ <div
+ className={'slideButton'}
+ onClick={() => {
+ PresBox.Instance.setChatActive(true);
+ PresBox.Instance.slideToModify = this.rootDoc;
+ PresBox.Instance.recordDictation();
+ }}>
+ <FontAwesomeIcon icon={'message'} onPointerDown={e => e.stopPropagation()} />
+ </div>
+ </Tooltip>
+ );
return items;
}
diff --git a/src/client/views/pdf/GPTPopup/GPTPopup.scss b/src/client/views/pdf/GPTPopup/GPTPopup.scss
index 5d966395c..48659d0e7 100644
--- a/src/client/views/pdf/GPTPopup/GPTPopup.scss
+++ b/src/client/views/pdf/GPTPopup/GPTPopup.scss
@@ -11,8 +11,8 @@ $highlightedText: #82e0ff;
right: 10px;
width: 250px;
min-height: 200px;
- border-radius: 15px;
- padding: 15px;
+ border-radius: 16px;
+ padding: 16px;
padding-bottom: 0;
z-index: 999;
display: flex;
diff --git a/src/client/views/pdf/GPTPopup/GPTPopup.tsx b/src/client/views/pdf/GPTPopup/GPTPopup.tsx
index da8a88803..42562986f 100644
--- a/src/client/views/pdf/GPTPopup/GPTPopup.tsx
+++ b/src/client/views/pdf/GPTPopup/GPTPopup.tsx
@@ -119,13 +119,15 @@ export class GPTPopup extends ObservableReactComponent<GPTPopupProps> {
try {
let image_urls = await gptImageCall(this.imgDesc);
if (image_urls && image_urls[0]) {
+ // TODO: verify the upload result and its accessPaths before using them
const [result] = await Networking.PostToServer('/uploadRemoteImage', { sources: [image_urls[0]] });
const source = Utils.prepend(result.accessPaths.agnostic.client);
this.setImgUrls([[image_urls[0], source]]);
}
} catch (err) {
console.log(err);
- return '';
}
this.setLoading(false);
};