Mirror of https://github.com/gristlabs/grist-core.git, synced 2024-10-27 20:44:07 +00:00
0be858c19d
This adds an ASSISTANT_CHAT_COMPLETION_ENDPOINT which can be used to enable AI Assistance instead of an OpenAI API key. The assistant then works, in the mechanical sense, against any compatible chat-completion endpoint; quality will of course depend on the model. I found some tweaks to the prompt that work well both for Llama-2 and for OpenAI's models, but I'm not including them here because they would conflict with some prompt changes that are already in the works. Co-authored-by: Alex Hall <alex.mojaki@gmail.com>
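As a rough illustration of the configuration the commit message describes, here is a hypothetical sketch (not grist-core's actual server code) of how a server might choose between an OpenAI API key and a generic chat-completion endpoint. Only ASSISTANT_CHAT_COMPLETION_ENDPOINT is named in the commit; the OPENAI_API_KEY variable, the AssistantConfig shape, and readAssistantConfig are assumptions for the example.

// Hypothetical sketch only -- not grist-core's actual server code.
// ASSISTANT_CHAT_COMPLETION_ENDPOINT comes from the commit message above;
// OPENAI_API_KEY is assumed here as the pre-existing alternative.
interface AssistantConfig {
  service?: 'openai' | 'custom';   // which backend the assistant would use
  endpoint?: string;               // chat-completion endpoint URL, if custom
  apiKey?: string;                 // OpenAI API key, if using OpenAI directly
}

// Decide how (or whether) AI Assistance is enabled, given environment variables.
export function readAssistantConfig(env: Record<string, string | undefined>): AssistantConfig {
  if (env.ASSISTANT_CHAT_COMPLETION_ENDPOINT) {
    return {service: 'custom', endpoint: env.ASSISTANT_CHAT_COMPLETION_ENDPOINT};
  }
  if (env.OPENAI_API_KEY) {
    return {service: 'openai', apiKey: env.OPENAI_API_KEY};
  }
  return {};  // Neither is set: AI Assistance stays disabled.
}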
27 lines
794 B
TypeScript
import {getGristConfig} from 'app/common/urlUtils';
import {get as getBrowserGlobals} from 'app/client/lib/browserGlobals';
import {localStorageBoolObs} from 'app/client/lib/localStorageObs';
import {Observable} from 'grainjs';

/**
 * Are comments enabled by feature flag?
 */
export function COMMENTS(): Observable<boolean> {
  const G = getBrowserGlobals('document', 'window');
  if (!G.window.COMMENTS) {
    G.window.COMMENTS = localStorageBoolObs('feature-comments', Boolean(getGristConfig().featureComments));
  }
  return G.window.COMMENTS;
}

/**
 * Does the backend support the AI assistant?
 */
export function HAS_FORMULA_ASSISTANT() {
  return Boolean(getGristConfig().featureFormulaAssistant);
}

/**
 * Which AI assistant service the backend is configured with, if any.
 */
export function WHICH_FORMULA_ASSISTANT() {
  return getGristConfig().assistantService;
}
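A minimal usage sketch follows, assuming this module is importable as 'app/client/models/features'; the import path and the surrounding call-site code are illustrative, not taken from grist-core.

import {COMMENTS, HAS_FORMULA_ASSISTANT, WHICH_FORMULA_ASSISTANT} from 'app/client/models/features';

// Illustrative call site: gate the formula-assistant UI on backend-provided config.
function maybeShowAssistantButton() {
  if (!HAS_FORMULA_ASSISTANT()) { return; }
  // assistantService from getGristConfig(), i.e. which provider the backend is wired to.
  console.log(`Assistant service: ${WHICH_FORMULA_ASSISTANT()}`);
}

// COMMENTS() returns a grainjs Observable<boolean>, so UI code can react to toggles.
const commentsEnabled = COMMENTS();
commentsEnabled.addListener((enabled) => {
  console.log(`Comments feature flag is now ${enabled ? 'on' : 'off'}`);
});

maybeShowAssistantButton();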