Diffstat (limited to 'src')
-rw-r--r--  src/client/apis/gpt/customization.ts | 150
-rw-r--r--  src/client/apis/gpt/setup.ts | 26
-rw-r--r--  src/client/views/ExtractColors.ts | 168
-rw-r--r--  src/client/views/PropertiesView.scss | 15
-rw-r--r--  src/client/views/PropertiesView.tsx | 149
-rw-r--r--  src/client/views/collections/collectionFreeForm/CollectionFreeFormLinkView.tsx | 18
-rw-r--r--  src/client/views/collections/collectionFreeForm/CollectionFreeFormView.tsx | 108
-rw-r--r--  src/client/views/global/globalScripts.ts | 4
-rw-r--r--  src/client/views/nodes/formattedText/FormattedTextBox.tsx | 27
-rw-r--r--  src/client/views/nodes/trails/PresBox.scss | 22
-rw-r--r--  src/client/views/nodes/trails/PresBox.tsx | 162
-rw-r--r--  src/client/views/nodes/trails/PresElementBox.tsx | 15
-rw-r--r--  src/client/views/pdf/GPTPopup/GPTPopup.scss | 4
-rw-r--r--  src/client/views/pdf/GPTPopup/GPTPopup.tsx | 4
14 files changed, 836 insertions, 36 deletions
diff --git a/src/client/apis/gpt/customization.ts b/src/client/apis/gpt/customization.ts
new file mode 100644
index 000000000..135b83353
--- /dev/null
+++ b/src/client/apis/gpt/customization.ts
@@ -0,0 +1,150 @@
+import { ChatCompletionRequestMessage } from 'openai';
+import { openai } from './setup';
+
+export enum CustomizationType {
+ PRES_TRAIL_SLIDE = 'trails',
+}
+
+export interface GeneratedResponse {
+ collectionBackgroundColor: string;
+ documentsWithColors: DocumentWithColor[];
+}
+
+export interface DocumentWithColor {
+ id: number;
+ color: string;
+}
+
+export interface StyleInputDocument {
+ id: number;
+ textContent: string;
+ textSize: number;
+}
+
+export interface StyleInput {
+ collectionDescription: string;
+ documents: StyleInputDocument[];
+ imageColors: string[];
+}
+
+interface PromptInfo {
+ description: string;
+ features: { name: string; description: string; values?: string[] }[];
+}
+const prompts: { [key: string]: PromptInfo } = {
+ trails: {
+ description:
+ 'We are adding customization to a slide in a presentation. Given a natural language input, translate it into a json with the required fields: [title, presentation_transition, presentation_effect, config_zoom, presentation_effectDirection].',
+ features: [],
+ },
+};
+
+export const addCustomizationProperty = (type: CustomizationType, name: string, description: string, values?: string[]) => {
+ values ? prompts[type].features.push({ name, description, values }) : prompts[type].features.push({ name, description });
+};
+
+const setupPresSlideCustomization = () => {
+ addCustomizationProperty(CustomizationType.PRES_TRAIL_SLIDE, 'title', 'is the title/name of the slide.');
+ addCustomizationProperty(CustomizationType.PRES_TRAIL_SLIDE, 'presentation_transition', 'is a number in milliseconds for how long it should take to transition/move to a slide.');
+ addCustomizationProperty(CustomizationType.PRES_TRAIL_SLIDE, 'presentation_effect', 'is an effect applied to the slide when we transition to it.', ['None', 'Fade in', 'Flip', 'Rotate', 'Bounce', 'Roll']);
+};
+
+setupPresSlideCustomization();
+
+export const gptTrailSlideCustomization = async (inputText: string) => {
+ let prompt = prompts.trails.description;
+
+ prompts.trails.features.forEach(feature => {
+ prompt += ' ' + feature.name + ' ' + feature.description;
+ if (feature.values) {
+ prompt += ` Its only possible values are [${feature.values.join(', ')}].`;
+ }
+ });
+
+ // prompt +=
+ // 'title is the title/name of the slide. presentation_transition is a number in milliseconds for how long it should take to transition/move to a slide. presentation_effect is an effect applied to the slide when we transition to it. Its only possible values are: [None, Fade in, Flip, Rotate, Bounce, Roll]. presentation_effectDirection is what direction the effect is applied. Its only possible values are: [Enter from left, Enter from right, Enter from bottom, Enter from Top, Enter from center]. config_zoom is a number from 0 to 1.0 indicating the percentage we should zoom into the slide.';
+
+ prompt += ' If the input does not contain info for a specific key, please set its value to null. Please only return the JSON with these keys and their values.';
+
+ try {
+ const response = await openai.createChatCompletion({
+ model: 'gpt-3.5-turbo',
+ messages: [
+ { role: 'system', content: prompt },
+ { role: 'user', content: inputText },
+ ],
+ temperature: 0.1,
+ max_tokens: 1000,
+ });
+ return response.data.choices[0].message?.content;
+ } catch (err) {
+ console.log(err);
+ return 'Error connecting with API.';
+ }
+};
+
+// palette / styling
+export const generatePalette = async (inputData: StyleInput, useImageData: boolean, inputText: string, lastResponse?: GeneratedResponse[]) => {
+ let prompt = 'Dash is a hypermedia web application that allows users to organize documents of different media types into collections. The user wants you to come up with cohesive color palettes for a collection.';
+ prompt +=
+ ' The user is going to give you a json object of this format:' +
+ JSON.stringify({ collectionDescription: 'string', documents: 'Document[]', imageColors: 'string[]' }) +
+ '. The user may follow by giving more specific instructions on what kind of palettes they want. collectionDescription is the title of the collection, which you should create color palettes based on. This is the document format:' +
+ JSON.stringify({
+ id: 'number',
+ textSize: 'number',
+ textContent: 'string',
+ }) +
+ (useImageData ? '. Finally, imageColors are the main hex colors of the images in the collection.' : '. Ignore imageColors.') +
+ ' You are going to generate three distinct variants of color palettes for the user to choose from, based mostly on collectionDescription and loosely on the text content and text size of the documents.' +
+ (useImageData ? ' You should slightly take imageColors into account, but primarily focus on crafting a palette that matches the text content.' : '') +
+ ' The variants should start with a light palette and grow increasingly intense and vibrant. Return a JSON array of three objects in this format:' +
+ JSON.stringify({
+ collectionBackgroundColor: 'string',
+ documentsWithColors: 'DocumentWithColor[]',
+ }) +
+ '. collectionBackgroundColor should be a string hex value for the background color of the collection. documentsWithColors has the same length and order as the input documents. DocumentWithColor has this format:' +
+ JSON.stringify({
+ id: 'number',
+ color: 'string',
+ }) +
+ ", and each element’s color is based on the theme of the overall color palette and also by its document’s textContent. Please pay attention to aesthetics of how each document's color complement the background and each other and choose a variety of colors when appropriate.";
+
+ // enforce format
+ prompt += ' Important: Respond with only the JSON array and no other text.';
+
+ // iteration
+
+ let messages: ChatCompletionRequestMessage[] = [
+ { role: 'system', content: prompt },
+ { role: 'user', content: JSON.stringify(inputData) },
+ ];
+
+ if (lastResponse && inputText !== '') {
+ messages.push({ role: 'assistant', content: JSON.stringify(lastResponse) });
+ messages.push({ role: 'user', content: 'Please modify the previously generated palettes with the following: ' + inputText });
+ } else if (inputText !== '') {
+ messages.push({ role: 'user', content: inputText });
+ }
+
+ console.log('Prompt: ', prompt);
+ console.log('Messages: ', messages);
+
+ try {
+ const response = await openai.createChatCompletion({
+ model: 'gpt-4',
+ messages: messages,
+ temperature: 0.1,
+ max_tokens: 2000,
+ });
+ const content = response.data.choices[0].message?.content;
+ console.log(content);
+ if (content) {
+ return content;
+ }
+ return 'Malformed response.';
+ } catch (err) {
+ console.log(err);
+ return 'Error connecting with API.';
+ }
+};
diff --git a/src/client/apis/gpt/setup.ts b/src/client/apis/gpt/setup.ts
new file mode 100644
index 000000000..d1db6968a
--- /dev/null
+++ b/src/client/apis/gpt/setup.ts
@@ -0,0 +1,26 @@
+import { Configuration, OpenAIApi } from 'openai';
+
+export enum GPTCallType {
+ SUMMARY = 'summary',
+ COMPLETION = 'completion',
+ EDIT = 'edit',
+}
+
+export type GPTCallOpts = {
+ model: string;
+ maxTokens: number;
+ temp: number;
+ prompt: string;
+};
+
+export const callTypeMap: { [type: string]: GPTCallOpts } = {
+ summary: { model: 'text-davinci-003', maxTokens: 256, temp: 0.5, prompt: 'Summarize this text in simpler terms: ' },
+ edit: { model: 'text-davinci-003', maxTokens: 256, temp: 0.5, prompt: 'Reword this: ' },
+ completion: { model: 'text-davinci-003', maxTokens: 256, temp: 0.5, prompt: '' },
+};
+
+const configuration = new Configuration({
+ apiKey: process.env.OPENAI_KEY,
+});
+
+export const openai = new OpenAIApi(configuration);
diff --git a/src/client/views/ExtractColors.ts b/src/client/views/ExtractColors.ts
new file mode 100644
index 000000000..f6928c52a
--- /dev/null
+++ b/src/client/views/ExtractColors.ts
@@ -0,0 +1,168 @@
+import { extractColors } from 'extract-colors';
+import { FinalColor } from 'extract-colors/lib/types/Color';
+
+// Manages image color extraction
+export class ExtractColors {
+ // loads all images into img elements
+ static loadImages = async (imageFiles: File[]): Promise<HTMLImageElement[]> => {
+ try {
+ const imageElements = await Promise.all(imageFiles.map(file => this.loadImage(file)));
+ return imageElements;
+ } catch (error) {
+ console.error(error);
+ return [];
+ }
+ };
+
+ // loads a single img into an img element
+ static loadImage = (file: File): Promise<HTMLImageElement> => {
+ return new Promise((resolve, reject) => {
+ const img = new Image();
+
+ img.onload = () => resolve(img);
+ img.onerror = error => reject(error);
+
+ const url = URL.createObjectURL(file);
+ img.src = url;
+ });
+ };
+
+ // loads all images into img elements
+ static loadImagesUrl = async (imageUrls: string[]): Promise<HTMLImageElement[]> => {
+ try {
+ const imageElements = await Promise.all(imageUrls.map(url => this.loadImageUrl(url)));
+ return imageElements;
+ } catch (error) {
+ console.error(error);
+ return [];
+ }
+ };
+
+ // loads a single img into an img element
+ static loadImageUrl = (url: string): Promise<HTMLImageElement> => {
+ return new Promise((resolve, reject) => {
+ const img = new Image();
+
+ img.onload = () => resolve(img);
+ img.onerror = error => reject(error);
+
+ img.src = url;
+ });
+ };
+
+ // extracts a list of colors from an img element
+ static getImgColors = async (img: HTMLImageElement) => {
+ const colors = await extractColors(img, { distance: 0.35 });
+ return colors;
+ };
+
+ static simpleSort = (colors: FinalColor[]): FinalColor[] => {
+ colors.sort((a, b) => {
+ if (a.hue !== b.hue) {
+ return b.hue - a.hue;
+ } else {
+ return b.saturation - a.saturation;
+ }
+ });
+ return colors;
+ };
+
+ static sortColors(colors: FinalColor[]): FinalColor[] {
+ // Convert color from RGB to CIELAB format
+ const convertToLab = (color: FinalColor): number[] => {
+ const r = color.red / 255;
+ const g = color.green / 255;
+ const b = color.blue / 255;
+
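+ // Approximate RGB -> XYZ -> CIELAB conversion (assumes linear RGB; sRGB gamma correction and white-point normalization are omitted)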
+ const x = r * 0.4124564 + g * 0.3575761 + b * 0.1804375;
+ const y = r * 0.2126729 + g * 0.7151522 + b * 0.072175;
+ const z = r * 0.0193339 + g * 0.119192 + b * 0.9503041;
+
+ const pivot = 0.008856;
+ const factor = 903.3;
+
+ const fx = x > pivot ? Math.cbrt(x) : (factor * x + 16) / 116;
+ const fy = y > pivot ? Math.cbrt(y) : (factor * y + 16) / 116;
+ const fz = z > pivot ? Math.cbrt(z) : (factor * z + 16) / 116;
+
+ const L = 116 * fy - 16;
+ const a = (fx - fy) * 500;
+ const b1 = (fy - fz) * 200;
+
+ return [L, a, b1];
+ };
+
+ // Sort colors using CIELAB distance for smooth transitions
+ colors.sort((colorA, colorB) => {
+ const labA = convertToLab(colorA);
+ const labB = convertToLab(colorB);
+
+ // Calculate Euclidean distance in CIELAB space
+ const distanceA = Math.sqrt(Math.pow(labA[0] - labB[0], 2) + Math.pow(labA[1] - labB[1], 2) + Math.pow(labA[2] - labB[2], 2));
+
+ const distanceB = Math.sqrt(Math.pow(labB[0] - labA[0], 2) + Math.pow(labB[1] - labA[1], 2) + Math.pow(labB[2] - labA[2], 2));
+
+ return distanceA - distanceB; // Sort by CIELAB distance
+ });
+
+ return colors;
+ }
+
+ static hexToFinalColor = (hex: string): FinalColor => {
+ const rgb = /^#?([a-f\d]{2})([a-f\d]{2})([a-f\d]{2})$/i.exec(hex);
+
+ if (!rgb) {
+ throw new Error('Invalid hex color format.');
+ }
+
+ const red = parseInt(rgb[1], 16);
+ const green = parseInt(rgb[2], 16);
+ const blue = parseInt(rgb[3], 16);
+
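+ // Derive HSL-style hue, saturation, and lightness from the parsed RGB components to populate the FinalColor fields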
+ const max = Math.max(red, green, blue);
+ const min = Math.min(red, green, blue);
+ const area = max - min;
+ const intensity = (max + min) / 2;
+
+ let hue = 0;
+ let saturation = 0;
+ let lightness = intensity;
+
+ if (area !== 0) {
+ saturation = area / (1 - Math.abs(2 * intensity - 1));
+ if (max === red) {
+ hue = (60 * ((green - blue) / area) + 360) % 360;
+ } else if (max === green) {
+ hue = (60 * ((blue - red) / area) + 120) % 360;
+ } else {
+ hue = (60 * ((red - green) / area) + 240) % 360;
+ }
+ }
+
+ return {
+ hex,
+ red,
+ green,
+ blue,
+ area,
+ hue,
+ saturation,
+ lightness,
+ intensity,
+ };
+ };
+}
+
+// for reference
+
+// type FinalColor = {
+// hex: string;
+// red: number;
+// green: number;
+// blue: number;
+// area: number;
+// hue: number;
+// saturation: number;
+// lightness: number;
+// intensity: number;
+// }
diff --git a/src/client/views/PropertiesView.scss b/src/client/views/PropertiesView.scss
index 8581bdf73..b21828aa7 100644
--- a/src/client/views/PropertiesView.scss
+++ b/src/client/views/PropertiesView.scss
@@ -7,6 +7,21 @@
position: absolute;
right: 4;
}
+.propertiesView-palette {
+ cursor: pointer;
+ padding: 8px;
+ border-radius: 4px;
+ transition: all 0.2s ease;
+ &:hover {
+ background-color: #3b3c3e;
+ }
+}
+.styling-chatbox {
+ color: #000000;
+ width: 100%;
+ outline: none;
+ border: none;
+}
.propertiesView {
height: 100%;
width: 250;
diff --git a/src/client/views/PropertiesView.tsx b/src/client/views/PropertiesView.tsx
index 208ed56c9..5cfe0bd5f 100644
--- a/src/client/views/PropertiesView.tsx
+++ b/src/client/views/PropertiesView.tsx
@@ -96,6 +96,58 @@ export class PropertiesView extends ObservableReactComponent<PropertiesViewProps
@observable openAppearance: boolean = true;
@observable openTransform: boolean = true;
@observable openFilters: boolean = false;
+ @observable openStyling: boolean = true;
+
+ // GPT styling
+ public styleInput: StyleInput | undefined;
+ @observable loadingStyles: boolean = false;
+ @observable generatedStyles: GeneratedResponse[] = [];
+ @observable inputDocs: Doc[] = [];
+ @observable selectedStyle: number = -1;
+ @observable useImageData = false;
+
+ @observable chatInput: string = '';
+
+ @action
+ setChatInput = (input: string) => {
+ this.chatInput = input;
+ };
+
+ @action
+ setLoading = (loading: boolean) => {
+ this.loadingStyles = loading;
+ };
+
+ @action
+ gptStyling = async () => {
+ // this.generatedStyles = [];
+ this.selectedStyle = -1;
+ this.setLoading(true);
+ console.log('Style input: ', this.styleInput);
+
+ if (!this.styleInput) return;
+
+ try {
+ let res: any;
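+ // include the previously generated palettes (if any) so GPT can iterate on them rather than start over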
+ if (this.generatedStyles.length === 0) {
+ res = await generatePalette(this.styleInput, this.useImageData, this.chatInput);
+ } else {
+ res = await generatePalette(this.styleInput, this.useImageData, this.chatInput, this.generatedStyles);
+ }
+ if (typeof res === 'string') {
+ console.log('Generated palettes: ', res);
+ const resObj = JSON.parse(res) as GeneratedResponse[];
+ this.setGeneratedStyles(resObj);
+ }
+ } catch (err) {
+ console.error(err);
+ }
+ this.setLoading(false);
+ };
+
+ @action
+ setGeneratedStyles = (responses: GeneratedResponse[]) => (this.generatedStyles = responses);
+ setInputDocs = (docs: Doc[]) => (this.inputDocs = docs);
//Pres Trails booleans:
@observable openPresTransitions: boolean = true;
@@ -1167,6 +1219,60 @@ export class PropertiesView extends ObservableReactComponent<PropertiesViewProps
}
};
+ @action
+ styleCollection = (i: number) => {
+ this.selectedStyle = i;
+ const resObj = this.generatedStyles[i];
+ if (this.selectedDoc && this.selectedDoc.type === 'collection') {
+ this.selectedDoc.backgroundColor = resObj.collectionBackgroundColor;
+ resObj.documentsWithColors.forEach((elem, i) => (this.inputDocs[i].backgroundColor = elem.color));
+ }
+ };
+
+ // GPT styling
+ @computed get stylingSubMenu() {
+ return (
+ <PropertiesSection title="Styling" isOpen={this.openStyling} setIsOpen={bool => (this.openStyling = bool)} onDoubleClick={() => this.CloseAll()}>
+ <div className="propertiesView-content" style={{ position: 'relative', height: 'auto', display: 'flex', flexDirection: 'column', alignItems: 'center', gap: '4px' }}>
+ {this.generatedStyles.length > 0 &&
+ this.generatedStyles.map((style, i) => (
+ <div
+ key={i}
+ className="propertiesView-palette"
+ style={{ display: 'flex', gap: '4px', backgroundColor: this.selectedStyle === i ? StrCast(Doc.UserDoc().userVariantColor) : '#00000000' }}
+ onClick={() => this.styleCollection(i)}>
+ <div style={{ width: '24px', height: '24px', backgroundColor: style.collectionBackgroundColor, borderRadius: '2px' }}></div>
+ {ExtractColors.sortColors(style.documentsWithColors.map(doc => ExtractColors.hexToFinalColor(doc.color))).map((c, i) => (
+ <div key={i} style={{ width: '24px', height: '24px', backgroundColor: c.hex, borderRadius: '2px' }}></div>
+ ))}
+ </div>
+ ))}
+ {this.loadingStyles && 'Generating styles...'}
+ <ReactTextareaAutosize
+ minRows={3}
+ placeholder="Customize..."
+ className="styling-chatbox"
+ autoFocus={true}
+ value={this.chatInput}
+ onChange={e => {
+ this.setChatInput(e.target.value);
+ }}
+ onKeyDown={e => {
+ e.stopPropagation();
+ }}
+ />
+ <div style={{ display: 'flex', justifyContent: 'flex-end', gap: '16px' }}>
+ <div style={{ display: 'flex', gap: '4px', alignItems: 'center' }}>
+ <label style={{ margin: '0px' }}>Use Images </label>
+ <input style={{ margin: '0px' }} type="checkbox" checked={this.useImageData} onChange={action(e => (this.useImageData = e.target.checked))} />
+ </div>
+ <Button text={'Regenerate'} type={Type.TERT} color={StrCast(Doc.UserDoc().userVariantColor)} onClick={this.gptStyling} />
+ </div>
+ </div>
+ </PropertiesSection>
+ );
+ }
+
@computed get filtersSubMenu() {
return (
<PropertiesSection title="Filters" isOpen={this.openFilters} setIsOpen={bool => (this.openFilters = bool)} onDoubleClick={() => this.CloseAll()}>
@@ -1252,10 +1358,23 @@ export class PropertiesView extends ObservableReactComponent<PropertiesViewProps
'change link relationship'
);
+ handleColorChange = undoable(
+ action((value: string) => {
+ if (LinkManager.currentLink && this.selectedDoc) {
+ this.setColorValue(value);
+ }
+ }),
+ 'change link color'
+ );
+
@undoBatch
setDescripValue = action((value: string) => {
if (LinkManager.currentLink) {
Doc.GetProto(LinkManager.currentLink).link_description = value;
+
+ if (LinkManager.currentLink.show_description === undefined) {
+ LinkManager.currentLink.show_description = !LinkManager.currentLink.show_description;
+ }
}
});
@@ -1301,6 +1420,13 @@ export class PropertiesView extends ObservableReactComponent<PropertiesViewProps
}
});
+ @undoBatch
+ setColorValue = action((value: string) => {
+ if (LinkManager.currentLink) {
+ Doc.GetProto(LinkManager.currentLink).link_color = value;
+ }
+ });
+
changeFollowBehavior = undoable((loc: Opt<string>) => this.sourceAnchor && (this.sourceAnchor.followLinkLocation = loc), 'change follow behavior');
@undoBatch
@@ -1469,6 +1595,28 @@ export class PropertiesView extends ObservableReactComponent<PropertiesViewProps
<FontAwesomeIcon className="fa-icon" icon={faArrowRight as IconLookup} size="lg" />
</button>
</div>
+ <div className="propertiesView-input inline" style={{ marginLeft: 10 }}>
+ <p>Show description</p>
+ <button
+ style={{ background: !LinkManager.currentLink?.show_description ? '' : '#4476f7', borderRadius: 3 }}
+ onPointerDown={e => this.toggleLinkProp(e, 'show_description')}
+ onClick={e => e.stopPropagation()}
+ className="propertiesButton">
+ <FontAwesomeIcon className="fa-icon" icon={faArrowRight as IconLookup} size="lg" />
+ </button>
+ </div>
+ <div className="propertiesView-input inline" style={{ marginLeft: 10 }}>
+ <p>Link color</p>
+ <ColorPicker
+ tooltip={'User Color'} //
+ color={SettingsManager.userColor}
+ type={Type.SEC}
+ icon={<FaFillDrip />}
+ selectedColor={LinkManager.currentLink?.link_color ? StrCast(LinkManager.currentLink?.link_color) : '#449ef7'}
+ setSelectedColor={this.handleColorChange}
+ setFinalColor={this.handleColorChange}
+ />
+ </div>
</div>
{!hasSelectedAnchor ? null : (
<div className="propertiesView-section">
@@ -1703,6 +1851,7 @@ export class PropertiesView extends ObservableReactComponent<PropertiesViewProps
<div className="propertiesView-name">{this.editableTitle}</div>
<div className="propertiesView-type"> {this.currentType} </div>
+ {this.stylingSubMenu}
{this.optionsSubMenu}
{this.linksSubMenu}
{!LinkManager.currentLink || !this.openLinks ? null : this.linkProperties}
diff --git a/src/client/views/collections/collectionFreeForm/CollectionFreeFormLinkView.tsx b/src/client/views/collections/collectionFreeForm/CollectionFreeFormLinkView.tsx
index f0a31a8c6..1b9627bb6 100644
--- a/src/client/views/collections/collectionFreeForm/CollectionFreeFormLinkView.tsx
+++ b/src/client/views/collections/collectionFreeForm/CollectionFreeFormLinkView.tsx
@@ -266,7 +266,8 @@ export class CollectionFreeFormLinkView extends ObservableReactComponent<Collect
const linkSize = Doc.noviceMode || currRelationshipIndex === -1 || currRelationshipIndex >= linkRelationshipSizes.length ? -1 : linkRelationshipSizes[currRelationshipIndex];
//access stroke color using index of the relationship in the color list (default black)
- const stroke = currRelationshipIndex === -1 || currRelationshipIndex >= linkColorList.length ? StrCast(link._backgroundColor, 'black') : linkColorList[currRelationshipIndex];
+ // const stroke = currRelationshipIndex === -1 || currRelationshipIndex >= linkColorList.length ? StrCast(link._backgroundColor, 'black') : linkColorList[currRelationshipIndex];
+ const stroke = link.link_color ? Field.toString(link.link_color as any as Field) : '#449ef7';
// const hexStroke = this.rgbToHex(stroke)
//calculate stroke width/thickness based on the relative importance of the relationshipship (i.e. how many links the relationship has)
@@ -305,13 +306,14 @@ export class CollectionFreeFormLinkView extends ObservableReactComponent<Collect
d={`M ${pt1[0]} ${pt1[1]} C ${pt1[0] + pt1norm[0]} ${pt1[1] + pt1norm[1]}, ${pt2[0] + pt2norm[0]} ${pt2[1] + pt2norm[1]}, ${pt2[0]} ${pt2[1]}`}
markerEnd={link.link_displayArrow ? `url(#${link[Id] + 'arrowhead'})` : ''}
/>
- {textX === undefined || !linkDescription ? null : (
- <text filter={`url(#${link[Id] + 'background'})`} className="collectionfreeformlinkview-linkText" x={textX} y={textY} onPointerDown={this.pointerDown}>
- <tspan>&nbsp;</tspan>
- <tspan dy="2">{linkDescription.substring(0, 50) + (linkDescription.length > 50 ? '...' : '')}</tspan>
- <tspan dy="2">&nbsp;</tspan>
- </text>
- )}
+ {link.show_description &&
+ (textX === undefined || !linkDescription ? null : (
+ <text filter={`url(#${link[Id] + 'background'})`} className="collectionfreeformlinkview-linkText" x={textX} y={textY} onPointerDown={this.pointerDown}>
+ <tspan>&nbsp;</tspan>
+ <tspan dy="2">{linkDescription.substring(0, 50) + (linkDescription.length > 50 ? '...' : '')}</tspan>
+ <tspan dy="2">&nbsp;</tspan>
+ </text>
+ ))}
</>
);
}
diff --git a/src/client/views/collections/collectionFreeForm/CollectionFreeFormView.tsx b/src/client/views/collections/collectionFreeForm/CollectionFreeFormView.tsx
index 53dc389b4..818c754c3 100644
--- a/src/client/views/collections/collectionFreeForm/CollectionFreeFormView.tsx
+++ b/src/client/views/collections/collectionFreeForm/CollectionFreeFormView.tsx
@@ -26,7 +26,7 @@ import { FollowLinkScript } from '../../../util/LinkFollower';
import { ReplayMovements } from '../../../util/ReplayMovements';
import { ScriptingGlobals } from '../../../util/ScriptingGlobals';
import { SelectionManager } from '../../../util/SelectionManager';
-import { freeformScrollMode } from '../../../util/SettingsManager';
+import { freeformScrollMode, SettingsManager } from '../../../util/SettingsManager';
import { SnappingManager } from '../../../util/SnappingManager';
import { Transform } from '../../../util/Transform';
import { undoBatch, UndoManager } from '../../../util/UndoManager';
@@ -817,26 +817,23 @@ export class CollectionFreeFormView extends CollectionSubView<Partial<collection
eraserMax.X >= inkViewBounds.left &&
eraserMax.Y >= inkViewBounds.top
)
- .reduce(
- (intersections, { inkStroke, inkView }) => {
- const { inkData } = inkStroke.inkScaledData();
- // Convert from screen space to ink space for the intersection.
- const prevPointInkSpace = inkStroke.ptFromScreen(lastPoint);
- const currPointInkSpace = inkStroke.ptFromScreen(currPoint);
- for (var i = 0; i < inkData.length - 3; i += 4) {
- const rawIntersects = InkField.Segment(inkData, i).intersects({
- // compute all unique intersections
- p1: { x: prevPointInkSpace.X, y: prevPointInkSpace.Y },
- p2: { x: currPointInkSpace.X, y: currPointInkSpace.Y },
- });
- const intersects = Array.from(new Set(rawIntersects as (number | string)[])); // convert to more manageable union array type
- // return tuples of the inkingStroke intersected, and the t value of the intersection
- intersections.push(...intersects.map(t => ({ inkView, t: +t + Math.floor(i / 4) }))); // convert string t's to numbers and add start of curve segment to convert from local t value to t value along complete curve
- }
- return intersections;
- },
- [] as { t: number; inkView: DocumentView }[]
- );
+ .reduce((intersections, { inkStroke, inkView }) => {
+ const { inkData } = inkStroke.inkScaledData();
+ // Convert from screen space to ink space for the intersection.
+ const prevPointInkSpace = inkStroke.ptFromScreen(lastPoint);
+ const currPointInkSpace = inkStroke.ptFromScreen(currPoint);
+ for (var i = 0; i < inkData.length - 3; i += 4) {
+ const rawIntersects = InkField.Segment(inkData, i).intersects({
+ // compute all unique intersections
+ p1: { x: prevPointInkSpace.X, y: prevPointInkSpace.Y },
+ p2: { x: currPointInkSpace.X, y: currPointInkSpace.Y },
+ });
+ const intersects = Array.from(new Set(rawIntersects as (number | string)[])); // convert to more manageable union array type
+ // return tuples of the inkingStroke intersected, and the t value of the intersection
+ intersections.push(...intersects.map(t => ({ inkView, t: +t + Math.floor(i / 4) }))); // convert string t's to numbers and add start of curve segment to convert from local t value to t value along complete curve
+ }
+ return intersections;
+ }, [] as { t: number; inkView: DocumentView }[]);
};
/**
@@ -1631,6 +1628,75 @@ export class CollectionFreeFormView extends CollectionSubView<Partial<collection
}
};
+ printDoc = (doc: Doc) => {
+ console.log('Printing keys');
+ Object.keys(doc).forEach(key => {
+ console.log(key, ':', doc[key]);
+ });
+ };
+
+ @action
+ openProperties = () => {
+ SettingsManager.propertiesWidth = 300;
+ };
+
+ choosePath(url: URL) {
+ if (!url?.href) return '';
+ const lower = url.href.toLowerCase();
+ if (url.protocol === 'data') return url.href;
+ if (url.href.indexOf(window.location.origin) === -1 && url.href.indexOf('dashblobstore') === -1) return Utils.CorsProxy(url.href);
+ if (!/\.(png|jpg|jpeg|gif|webp)$/.test(lower)) return `/assets/unknown-file-icon-hi.png`;
+
+ const ext = extname(url.href);
+ return url.href.replace(ext, '_m' + ext);
+ }
+
+ // gpt styling
+ @action
+ gptStyling = async () => {
+ // clear it in properties instead
+ if (!PropertiesView.Instance) return;
+ this.openProperties();
+ PropertiesView.Instance.setGeneratedStyles([]);
+ PropertiesView.Instance.selectedStyle = -1;
+ PropertiesView.Instance.useImageData = false;
+
+ console.log('Title', this.rootDoc.title);
+ console.log('bgcolor', this.layoutDoc._backgroundColor);
+ // doc.backgroundColor
+ const inputDocs = this.childDocs.filter(doc => doc.type == 'rich text');
+ const imgDocs = this.childDocs.filter(doc => doc.type == 'image');
+ const imgUrls = imgDocs.map(doc => this.choosePath((doc.data as ImageField).url));
+
+ const imageElements = await ExtractColors.loadImagesUrl(imgUrls);
+ const colors = await Promise.all(imageElements.map(elem => ExtractColors.getImgColors(elem)));
+ let colorHexes = colors
+ .reduce((acc, curr) => acc.concat(curr), [])
+ .map(color => color.hex)
+ .slice(0, 10);
+ console.log('Hexes', colorHexes);
+
+ PropertiesView.Instance?.setInputDocs(inputDocs);
+
+ // also pass it colors
+ const gptInput: StyleInputDocument[] = inputDocs.map((doc, i) => ({
+ id: i,
+ textContent: (doc.text as RichTextField)?.Text,
+ textSize: 16,
+ }));
+
+ const collectionDescription = StrCast(this.rootDoc.title);
+
+ const styleInput = {
+ collectionDescription,
+ documents: gptInput,
+ imageColors: colorHexes,
+ };
+
+ PropertiesView.Instance.styleInput = styleInput;
+ PropertiesView.Instance.gptStyling();
+ };
+
onContextMenu = (e: React.MouseEvent) => {
if (this._props.isAnnotationOverlay || !ContextMenu.Instance) return;
diff --git a/src/client/views/global/globalScripts.ts b/src/client/views/global/globalScripts.ts
index 3084a7972..c2d6cea04 100644
--- a/src/client/views/global/globalScripts.ts
+++ b/src/client/views/global/globalScripts.ts
@@ -31,6 +31,10 @@ ScriptingGlobals.add(function setView(view: string) {
selected ? (selected._type_collection = view) : console.log('[FontIconBox.tsx] changeView failed');
});
+ScriptingGlobals.add(function setSettingBgColor(isSetting: boolean) {
+ Doc.UserDoc().settingBgColor = isSetting;
+});
+
// toggle: Set overlay status of selected document
ScriptingGlobals.add(function setBackgroundColor(color?: string, checkResult?: boolean) {
const selectedViews = SelectionManager.Views;
diff --git a/src/client/views/nodes/formattedText/FormattedTextBox.tsx b/src/client/views/nodes/formattedText/FormattedTextBox.tsx
index 731ab1d53..f9cef1a60 100644
--- a/src/client/views/nodes/formattedText/FormattedTextBox.tsx
+++ b/src/client/views/nodes/formattedText/FormattedTextBox.tsx
@@ -944,13 +944,12 @@ export class FormattedTextBox extends ViewBoxAnnotatableComponent<FieldViewProps
try {
let res = await gptAPICall((this.dataDoc.text as RichTextField)?.Text, GPTCallType.COMPLETION);
if (!res) {
- console.error('GPT call failed');
this.animateRes(0, 'Something went wrong.');
} else {
this.animateRes(0, res);
}
} catch (err) {
- console.error('GPT call failed');
+ console.error(err);
this.animateRes(0, 'Something went wrong.');
}
});
@@ -964,6 +963,7 @@ export class FormattedTextBox extends ViewBoxAnnotatableComponent<FieldViewProps
};
breakupDictation = () => {
+ console.log('breakup');
if (this._editorView && this._recordingDictation) {
this.stopDictation(true);
this._break = true;
@@ -1146,6 +1146,25 @@ export class FormattedTextBox extends ViewBoxAnnotatableComponent<FieldViewProps
@computed get contentScaling() {
return Doc.NativeAspect(this.Document, this.dataDoc, false) ? this._props.NativeDimScaling?.() || 1 : 1;
}
+
+ @action
+ checkBackgroundColor() {
+ console.log('checking bg color 1');
+ if (BoolCast(Doc.UserDoc().settingBgColor)) return;
+ console.log('checking bg color 2');
+ let fontColor = '#000000';
+ if (isDarkMode(StrCast(this.rootDoc._backgroundColor))) {
+ fontColor = '#ffffff';
+ }
+ // apply the chosen font color (black or white, depending on background darkness) to the document text
+ if (!this._editorView) return;
+ const tr = this._editorView?.state.tr;
+
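+ // select the entire document and recolor it with a single ProseMirror transaction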
+ tr.setSelection(TextSelection.create(tr.doc, 0, tr.doc.content.size));
+ tr.addMark(0, tr.doc.content.size, schema.marks.pFontColor.create({ color: fontColor }));
+ this._editorView.dispatch(tr);
+ }
+
componentDidMount() {
!this._props.dontSelectOnLoad && this._props.setContentViewBox?.(this); // this tells the DocumentView that this AudioBox is the "content" of the document. this allows the DocumentView to indirectly call getAnchor() on the AudioBox when making a link.
this._cachedLinks = LinkManager.Links(this.Document);
@@ -1168,6 +1187,10 @@ export class FormattedTextBox extends ViewBoxAnnotatableComponent<FieldViewProps
({ width, scrollHeight, layout_autoHeight }) => width && layout_autoHeight && this.resetNativeHeight(scrollHeight),
{ fireImmediately: true }
);
+ this._disposers.bgColor = reaction(
+ () => this.rootDoc._backgroundColor,
+ color => this.checkBackgroundColor()
+ );
this._disposers.componentHeights = reaction(
// set the document height when one of the component heights changes and layout_autoHeight is on
() => ({ sidebarHeight: this.sidebarHeight, textHeight: this.textHeight, layout_autoHeight: this.layout_autoHeight, marginsHeight: this.layout_autoHeightMargins }),
diff --git a/src/client/views/nodes/trails/PresBox.scss b/src/client/views/nodes/trails/PresBox.scss
index 3b34a1f90..1537ad0b8 100644
--- a/src/client/views/nodes/trails/PresBox.scss
+++ b/src/client/views/nodes/trails/PresBox.scss
@@ -15,6 +15,28 @@
//overflow: hidden;
transition: 0.7s opacity ease;
+ .presBox-chatbox {
+ position: fixed;
+ bottom: 8px;
+ left: 8px;
+ width: calc(100% - 16px);
+ min-height: 100px;
+ border-radius: 16px;
+ padding: 16px;
+ gap: 8px;
+ z-index: 999;
+ display: flex;
+ flex-direction: column;
+ justify-content: space-between;
+ background-color: #ffffff;
+ box-shadow: 0 2px 5px #7474748d;
+
+ .pres-chatbox {
+ outline: none;
+ border: none;
+ }
+ }
+
.presBox-listCont {
position: relative;
height: calc(100% - 67px);
diff --git a/src/client/views/nodes/trails/PresBox.tsx b/src/client/views/nodes/trails/PresBox.tsx
index 9e5ea9524..b2059b185 100644
--- a/src/client/views/nodes/trails/PresBox.tsx
+++ b/src/client/views/nodes/trails/PresBox.tsx
@@ -35,6 +35,7 @@ import { DocumentView, OpenWhere, OpenWhereMod } from '../DocumentView';
import { FocusViewOptions, FieldView, FieldViewProps } from '../FieldView';
import { ScriptingBox } from '../ScriptingBox';
import './PresBox.scss';
+import ReactLoading from 'react-loading';
import { PresEffect, PresEffectDirection, PresMovement, PresStatus } from './PresEnums';
export interface pinDataTypes {
scrollable?: boolean;
@@ -103,7 +104,37 @@ export class PresBox extends ViewBoxBaseComponent<FieldViewProps>() {
@observable _treeViewMap: Map<Doc, number> = new Map();
@observable _presKeyEvents: boolean = false;
@observable _forceKeyEvents: boolean = false;
- @computed get isTreeOrStack() {
+
+ // GPT
+ private _inputref: HTMLTextAreaElement | null = null;
+ @observable chatActive: boolean = false;
+ @observable chatInput: string = '';
+ public slideToModify: Doc | null = null;
+ @observable isRecording: boolean = false;
+ @observable isLoading: boolean = false;
+
+ @action
+ setChatInput = (input: string) => {
+ this.chatInput = input;
+ };
+
+ @action
+ setIsLoading = (isLoading: boolean) => {
+ this.isLoading = isLoading;
+ };
+
+ @action
+ public setChatActive = (active: boolean) => {
+ this.chatActive = active;
+ };
+
+ @action
+ public setIsRecording = (isRecording: boolean) => {
+ this.isRecording = isRecording;
+ };
+
+ @computed
+ get isTreeOrStack() {
return [CollectionViewType.Tree, CollectionViewType.Stacking].includes(StrCast(this.layoutDoc._type_collection) as any);
}
@computed get isTree() {
@@ -226,6 +257,71 @@ export class PresBox extends ViewBoxBaseComponent<FieldViewProps>() {
}
};
+ // GPT
+
+ recordDictation = () => {
+ this.setIsRecording(true);
+ this.setChatInput('');
+ DictationManager.Controls.listen({
+ interimHandler: this.setDictationContent,
+ continuous: { indefinite: false },
+ }).then(results => {
+ if (results && [DictationManager.Controls.Infringed].includes(results)) {
+ DictationManager.Controls.stop();
+ }
+ });
+ };
+ stopDictation = (abort: boolean) => {
+ this.setIsRecording(false);
+ DictationManager.Controls.stop(!abort);
+ };
+
+ setDictationContent = (value: string) => {
+ this.setChatInput(value);
+ // // Get the current cursor position
+ // if (!this._inputref) return;
+ // const cursorPosition = this._inputref.selectionStart;
+ // const currentValue = this.chatInput;
+
+ // // split before and after
+ // const textBeforeCursor = currentValue.slice(0, cursorPosition);
+ // const textAfterCursor = currentValue.slice(cursorPosition);
+
+ // // insertion
+ // const updatedText = textBeforeCursor + value + textAfterCursor;
+
+ // // Update the textarea value
+ // this.setChatInput(updatedText);
+
+ // // set new cursor pos
+ // const newCursorPosition = cursorPosition + value.length;
+ // this._inputref.setSelectionRange(newCursorPosition, newCursorPosition);
+ };
+
+ @action
+ customizeWithGPT = async (input: string) => {
+ // const testInput = 'change title to Customized Slide, transition for 2.3s with fade in effect';
+ if (!this.slideToModify) return;
+ this.setIsRecording(false);
+ this.setIsLoading(true);
+ try {
+ const res = await gptTrailSlideCustomization(input);
+ if (typeof res === 'string') {
+ const resObj = JSON.parse(res);
+ console.log('Result ', resObj);
+ // this.activeItem
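+ // copy every non-null field from the GPT response onto the slide being modified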
+ for (let key in resObj) {
+ if (resObj[key]) {
+ this.slideToModify[key] = resObj[key];
+ }
+ }
+ }
+ } catch (err) {
+ console.error(err);
+ }
+ this.setIsLoading(false);
+ };
+
//TODO: al: it seems currently that tempMedia doesn't stop onslidechange after clicking the button; the time the tempmedia stop depends on the start & end time
// TODO: to handle child slides (entering into subtrail and exiting), also the next() and back() functions
// No more frames in current doc and next slide is defined, therefore move to next slide
@@ -732,6 +828,8 @@ export class PresBox extends ViewBoxBaseComponent<FieldViewProps>() {
*/
navigateToActiveItem = (afterNav?: () => void) => {
const activeItem: Doc = this.activeItem;
+ // GPT update
+ this.slideToModify = activeItem;
const targetDoc: Doc = this.targetDoc;
const finished = () => {
afterNav?.();
@@ -1189,6 +1287,7 @@ export class PresBox extends ViewBoxBaseComponent<FieldViewProps>() {
@action
keyEvents = (e: KeyboardEvent) => {
if (e.target instanceof HTMLInputElement) return;
+ if (e.target instanceof HTMLTextAreaElement) return;
let handled = false;
const anchorNode = document.activeElement as HTMLDivElement;
if (anchorNode && anchorNode.className?.includes('lm_title')) return;
@@ -2623,7 +2722,6 @@ export class PresBox extends ViewBoxBaseComponent<FieldViewProps>() {
/>
) : null}
</div>
-
{/* {
// if the document type is a presentation, then the collection stacking view has a "+ new slide" button at the bottom of the stack
<Tooltip title={<div className="dash-tooltip">{'Click on document to pin to presentaiton or make a marquee selection to pin your desired view'}</div>}>
@@ -2633,6 +2731,66 @@ export class PresBox extends ViewBoxBaseComponent<FieldViewProps>() {
</Tooltip>
} */}
</div>
+ {/* presbox chatbox */}
+ {this.chatActive && (
+ <div className="presBox-chatbox">
+ <div style={{ alignSelf: 'flex-end' }}>
+ {this.isLoading ? (
+ <ReactLoading type="spin" color={StrCast(Doc.UserDoc().userVariantColor)} width={14} height={14} />
+ ) : (
+ <IconButton
+ type={Type.PRIM}
+ color={StrCast(Doc.UserDoc().userVariantColor)}
+ tooltip="Close"
+ icon={<BiX size={'16px'} />}
+ onClick={() => {
+ this.setChatActive(false);
+ }}
+ />
+ )}
+ </div>
+ <TextareaAutosize
+ ref={r => (this._inputref = r)}
+ minRows={3}
+ placeholder="Customize..."
+ className="pres-chatbox"
+ autoFocus={true}
+ value={this.chatInput}
+ onChange={e => {
+ this.setChatInput(e.target.value);
+ }}
+ onKeyDown={e => {
+ this.stopDictation(true);
+ e.stopPropagation();
+ }}
+ />
+ {/* <input className="chatbox" placeholder="Customize..." value={this.chatInput} onChange={e => this.setChatInput(e.target.value)} /> */}
+ <div style={{ alignSelf: 'flex-end', display: 'flex', gap: '8px' }}>
+ <IconButton
+ type={Type.TERT}
+ color={this.isRecording ? 'red' : StrCast(Doc.UserDoc().userVariantColor)}
+ tooltip="Record"
+ icon={<BiMicrophone size={'16px'} />}
+ onClick={() => {
+ if (!this.isRecording) {
+ this.recordDictation();
+ } else {
+ this.stopDictation(true);
+ }
+ }}
+ />
+ <IconButton
+ type={Type.TERT}
+ color={!this.isLoading ? StrCast(Doc.UserDoc().userVariantColor) : '#7c7c7c'}
+ tooltip="Send"
+ icon={<AiOutlineSend size={'16px'} />}
+ onClick={() => {
+ this.customizeWithGPT(this.chatInput);
+ }}
+ />
+ </div>
+ </div>
+ )}
</div>
);
}
diff --git a/src/client/views/nodes/trails/PresElementBox.tsx b/src/client/views/nodes/trails/PresElementBox.tsx
index 5b2aa1cde..ed2f25fb6 100644
--- a/src/client/views/nodes/trails/PresElementBox.tsx
+++ b/src/client/views/nodes/trails/PresElementBox.tsx
@@ -409,6 +409,8 @@ export class PresElementBox extends ViewBoxBaseComponent<FieldViewProps>() {
return presBoxDocView ? presBoxDocView._props.PanelWidth() : width ? width : 300;
}
+ // GPT
+
@computed get presButtons() {
const presBox = this.presBox;
const presBoxColor = StrCast(presBox?._backgroundColor);
@@ -512,6 +514,19 @@ export class PresElementBox extends ViewBoxBaseComponent<FieldViewProps>() {
</div>
</Tooltip>
);
+ items.push(
+ <Tooltip key="customize" title={<div className="dash-tooltip">Customize</div>}>
+ <div
+ className={'slideButton'}
+ onClick={() => {
+ PresBox.Instance.setChatActive(true);
+ PresBox.Instance.slideToModify = this.rootDoc;
+ PresBox.Instance.recordDictation();
+ }}>
+ <FontAwesomeIcon icon={'message'} onPointerDown={e => e.stopPropagation()} />
+ </div>
+ </Tooltip>
+ );
return items;
}
diff --git a/src/client/views/pdf/GPTPopup/GPTPopup.scss b/src/client/views/pdf/GPTPopup/GPTPopup.scss
index 5d966395c..48659d0e7 100644
--- a/src/client/views/pdf/GPTPopup/GPTPopup.scss
+++ b/src/client/views/pdf/GPTPopup/GPTPopup.scss
@@ -11,8 +11,8 @@ $highlightedText: #82e0ff;
right: 10px;
width: 250px;
min-height: 200px;
- border-radius: 15px;
- padding: 15px;
+ border-radius: 16px;
+ padding: 16px;
padding-bottom: 0;
z-index: 999;
display: flex;
diff --git a/src/client/views/pdf/GPTPopup/GPTPopup.tsx b/src/client/views/pdf/GPTPopup/GPTPopup.tsx
index da8a88803..42562986f 100644
--- a/src/client/views/pdf/GPTPopup/GPTPopup.tsx
+++ b/src/client/views/pdf/GPTPopup/GPTPopup.tsx
@@ -119,13 +119,15 @@ export class GPTPopup extends ObservableReactComponent<GPTPopupProps> {
try {
let image_urls = await gptImageCall(this.imgDesc);
if (image_urls && image_urls[0]) {
+ // need to fix this
const [result] = await Networking.PostToServer('/uploadRemoteImage', { sources: [image_urls[0]] });
+ console.log('Result', result);
+ console.log('Client', result.accessPaths.agnostic.client);
const source = Utils.prepend(result.accessPaths.agnostic.client);
this.setImgUrls([[image_urls[0], source]]);
}
} catch (err) {
console.log(err);
- return '';
}
this.setLoading(false);
};