allow AI Assistance to run against any chat-completion-style endpoint (#630)

This adds an ASSISTANT_CHAT_COMPLETION_ENDPOINT setting which can be used
to enable AI Assistance instead of an OpenAI API key. The assistant
then works mechanically against any compatible endpoint.
Quality of course will depend on the model. I found some tweaks
to the prompt that work well both for Llama-2 and for OpenAI's models,
but I'm not including them here because they would conflict with some
prompt changes that are already in the works.

Co-authored-by: Alex Hall <alex.mojaki@gmail.com>
This commit is contained in:
Paul Fitzpatrick
2023-08-18 16:14:42 -04:00
committed by GitHub
parent 5705c37d02
commit 0be858c19d
7 changed files with 81 additions and 23 deletions

View File

@@ -20,3 +20,7 @@ export function COMMENTS(): Observable<boolean> {
/**
 * Whether the formula assistant feature is enabled, per the Grist config.
 * Coerces the (possibly undefined) config flag to a strict boolean.
 */
export function HAS_FORMULA_ASSISTANT() {
  const {featureFormulaAssistant} = getGristConfig();
  return Boolean(featureFormulaAssistant);
}
/**
 * Which assistant service the Grist config selects (e.g. 'OpenAI'),
 * as read from the `assistantService` config field.
 */
export function WHICH_FORMULA_ASSISTANT() {
  const config = getGristConfig();
  return config.assistantService;
}

View File

@@ -6,7 +6,7 @@ import {movable} from 'app/client/lib/popupUtils';
import {logTelemetryEvent} from 'app/client/lib/telemetry';
import {ColumnRec, ViewFieldRec} from 'app/client/models/DocModel';
import {ChatMessage} from 'app/client/models/entities/ColumnRec';
import {HAS_FORMULA_ASSISTANT} from 'app/client/models/features';
import {HAS_FORMULA_ASSISTANT, WHICH_FORMULA_ASSISTANT} from 'app/client/models/features';
import {getLoginOrSignupUrl, urlState} from 'app/client/models/gristUrlState';
import {buildHighlightedCode} from 'app/client/ui/CodeHighlight';
import {autoGrow} from 'app/client/ui/forms';
@@ -879,7 +879,7 @@ class ChatHistory extends Disposable {
'"Please calculate the total invoice amount."'
),
),
cssAiMessageBullet(
(WHICH_FORMULA_ASSISTANT() === 'OpenAI') ? cssAiMessageBullet(
cssTickIcon('Tick'),
dom('div',
t(
@@ -891,7 +891,7 @@ class ChatHistory extends Disposable {
}
),
),
),
) : null,
),
cssAiMessageParagraph(
t(