mirror of
https://github.com/gristlabs/grist-core.git
synced 2026-03-02 04:09:24 +00:00
allow AI Assistance to run against any chat-completion-style endpoint (#630)
This adds an ASSISTANT_CHAT_COMPLETION_ENDPOINT environment variable which can be used to enable AI Assistance instead of an OpenAI API key. The assistant then works, in the mechanical sense, against any compatible chat-completion endpoint; quality of course will depend on the model. I found some tweaks to the prompt that work well both for Llama-2 and for OpenAI's models, but I'm not including them here because they would conflict with some prompt changes that are already in the works. Co-authored-by: Alex Hall <alex.mojaki@gmail.com>
This commit is contained in:
@@ -6,7 +6,7 @@ import {movable} from 'app/client/lib/popupUtils';
|
||||
import {logTelemetryEvent} from 'app/client/lib/telemetry';
|
||||
import {ColumnRec, ViewFieldRec} from 'app/client/models/DocModel';
|
||||
import {ChatMessage} from 'app/client/models/entities/ColumnRec';
|
||||
import {HAS_FORMULA_ASSISTANT} from 'app/client/models/features';
|
||||
import {HAS_FORMULA_ASSISTANT, WHICH_FORMULA_ASSISTANT} from 'app/client/models/features';
|
||||
import {getLoginOrSignupUrl, urlState} from 'app/client/models/gristUrlState';
|
||||
import {buildHighlightedCode} from 'app/client/ui/CodeHighlight';
|
||||
import {autoGrow} from 'app/client/ui/forms';
|
||||
@@ -879,7 +879,7 @@ class ChatHistory extends Disposable {
|
||||
'"Please calculate the total invoice amount."'
|
||||
),
|
||||
),
|
||||
cssAiMessageBullet(
|
||||
(WHICH_FORMULA_ASSISTANT() === 'OpenAI') ? cssAiMessageBullet(
|
||||
cssTickIcon('Tick'),
|
||||
dom('div',
|
||||
t(
|
||||
@@ -891,7 +891,7 @@ class ChatHistory extends Disposable {
|
||||
}
|
||||
),
|
||||
),
|
||||
),
|
||||
) : null,
|
||||
),
|
||||
cssAiMessageParagraph(
|
||||
t(
|
||||
|
||||
Reference in New Issue
Block a user