(core) External requests

Summary:
Adds a Python function `REQUEST` which makes an HTTP GET request. Behind the scenes it:

- Raises a special exception (`RequestingError`) to stop evaluating the current cell and keep its existing value for now.
- Records the request arguments, which are returned to NodeJS by `apply_user_actions`.
- Makes the actual request in NodeJS, which sends the raw response data back in a new `RespondToRequests` user action that reevaluates the cell(s) that made the request.
- Wraps the response data in a class that mimics the `Response` class of the `requests` library (see the usage sketch below).
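
For illustration, a formula might use the new function roughly like this (the URL, params, and column are hypothetical; REQUEST requires GRIST_EXPERIMENTAL_PLUGINS=1, and the `Response` attributes shown are the ones implemented in this commit):

  # Formula for a hypothetical column. REQUEST feels synchronous to the formula writer,
  # even though the fetch itself normally happens asynchronously in NodeJS.
  r = REQUEST("https://example.com/api/items", params={"q": $Name}, headers={"Accept": "application/json"})
  r.raise_for_status()      # raises HTTPError for status codes >= 400
  return r.json()["price"]  # r.text, r.content, r.status_code, r.ok and r.headers are also available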

In certain cases, this asynchronous flow doesn't work, and the sandbox instead synchronously calls an exported JS method:

- When reevaluating a single cell to get a formula error, the request is made synchronously.
- When a formula makes multiple requests, the earlier responses are retrieved synchronously from files, which cache responses only as long as needed to finish evaluating formulas (a sketch of the request key and `deps` bookkeeping follows below). See https://grist.slack.com/archives/CL1LQ8AT0/p1653399747810139
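
Roughly, the bookkeeping that ties a request to the cells waiting on it looks like the sketch below (illustrative values; the shapes match the new `SandboxRequest` interface and the Python test):

  import hashlib, json

  # A request is identified by a hash of its arguments, so identical calls map to the same key
  # and each unique request is only made once per round of calculation.
  args = dict(url="https://example.com/api", params=None, headers=None)  # hypothetical request
  key = hashlib.sha256(json.dumps(args, sort_keys=True).encode()).hexdigest()

  # apply_user_actions returns each unique request once under `requests`, with `deps` recording
  # which cells (table -> column -> row ids) to recompute when NodeJS has a response, e.g.
  #   requests == {key: dict(args, deps={"Table1": {"Request": [1, 2]}})}
  # NodeJS then replies by applying ["RespondToRequests", {key: response}, cached_request_keys].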

Test Plan: Added Python and nbrowser tests.

Reviewers: georgegevoian

Reviewed By: georgegevoian

Subscribers: paulfitz, dsagal

Differential Revision: https://phab.getgrist.com/D3429
Alex Hall 2022-06-17 20:49:18 +02:00
parent abebe812db
commit 9fffb491f9
17 changed files with 582 additions and 24 deletions

@@ -62,6 +62,16 @@ export interface SandboxActionBundle {
undo: Array<EnvContent<DocAction>>; // Inverse actions for all 'stored' actions.
retValues: any[]; // Contains retValue for each of userActions.
rowCount: number;
// Mapping of keys (hashes of request args) to all unique requests made in a round of calculation
requests?: Record<string, SandboxRequest>;
}
// Represents a unique call to the Python REQUEST function
export interface SandboxRequest {
url: string;
params: Record<string, string> | null;
headers: Record<string, string> | null;
deps: unknown; // pass back to the sandbox unchanged in the response
}
// Local action that's been applied. It now has an actionNum, and includes doc actions packaged

@@ -144,6 +144,9 @@ export interface TableRecordValue {
export type UserAction = Array<string|number|object|boolean|null|undefined>;
// Actions that trigger formula calculations in the data engine
export const CALCULATING_USER_ACTIONS = new Set(['Calculate', 'UpdateCurrentTime', 'RespondToRequests']);
/**
* Gives a description for an action which involves setting values to a selection.
* @param {Array} action - The (Bulk)AddRecord/(Bulk)UpdateRecord action to describe.

@@ -4,7 +4,13 @@
* change events.
*/
import {getEnvContent, LocalActionBundle, SandboxActionBundle, UserActionBundle} from 'app/common/ActionBundle';
import {
getEnvContent,
LocalActionBundle,
SandboxActionBundle,
SandboxRequest,
UserActionBundle
} from 'app/common/ActionBundle';
import {ActionGroup, MinimalActionGroup} from 'app/common/ActionGroup';
import {ActionSummary} from "app/common/ActionSummary";
import {
@@ -33,12 +39,7 @@ import {
UserAction
} from 'app/common/DocActions';
import {DocData} from 'app/common/DocData';
import {
getDataLimitRatio,
getDataLimitStatus,
getSeverity,
LimitExceededError,
} from 'app/common/DocLimits';
import {getDataLimitRatio, getDataLimitStatus, getSeverity, LimitExceededError} from 'app/common/DocLimits';
import {DocSnapshots} from 'app/common/DocSnapshot';
import {DocumentSettings} from 'app/common/DocumentSettings';
import {
@@ -75,6 +76,7 @@ import {GRIST_DOC_SQL, GRIST_DOC_WITH_TABLE1_SQL} from 'app/server/lib/initialDo
import {ISandbox} from 'app/server/lib/ISandbox';
import * as log from 'app/server/lib/log';
import {LogMethods} from "app/server/lib/LogMethods";
import {DocRequests} from 'app/server/lib/Requests';
import {shortDesc} from 'app/server/lib/shortDesc';
import {TableMetadataLoader} from 'app/server/lib/TableMetadataLoader';
import {DocTriggers} from "app/server/lib/Triggers";
@@ -182,6 +184,7 @@ export class ActiveDoc extends EventEmitter {
private _log = new LogMethods('ActiveDoc ', (s: OptDocSession) => this.getLogMeta(s));
private _triggers: DocTriggers;
private _requests: DocRequests;
private _dataEngine: Promise<ISandbox>|undefined;
private _activeDocImport: ActiveDocImport;
private _onDemandActions: OnDemandActions;
@@ -270,6 +273,7 @@
this.docStorage = new DocStorage(docManager.storageManager, docName);
this.docClients = new DocClients(this);
this._triggers = new DocTriggers(this);
this._requests = new DocRequests(this);
this._actionHistory = new ActionHistoryImpl(this.docStorage);
this.docPluginManager = new DocPluginManager(docManager.pluginManager.getPlugins(),
docManager.pluginManager.appRoot!, this, this._docManager.gristServer);
@@ -1095,7 +1099,7 @@
this.dataLimitStatus === "deleteOnly" &&
!actions.every(action => [
'RemoveTable', 'RemoveColumn', 'RemoveRecord', 'BulkRemoveRecord',
'RemoveViewSection', 'RemoveView', 'ApplyUndoActions',
'RemoveViewSection', 'RemoveView', 'ApplyUndoActions', 'RespondToRequests',
].includes(action[0] as string))
) {
throw new Error("Document is in delete-only mode");
@@ -1420,6 +1424,10 @@
}
const user = docSession ? await this._granularAccess.getCachedUser(docSession) : undefined;
sandboxActionBundle = await this._rawPyCall('apply_user_actions', normalActions, user?.toJSON());
const {requests} = sandboxActionBundle;
if (requests) {
this._requests.handleRequestsBatchFromUserActions(requests).catch(e => console.error(e));
}
await this._reportDataEngineMemory();
} else {
// Create default SandboxActionBundle to use if the data engine is not called.
@@ -2087,6 +2095,7 @@
preferredPythonVersion,
sandboxOptions: {
exports: {
request: (key: string, args: SandboxRequest) => this._requests.handleSingleRequestWithCache(key, args),
guessColInfo: (values: Array<string | null>) =>
guessColInfoWithDocData(values, this.docData!),
convertFromColumn: (...args: Parameters<ReturnType<typeof convertFromColumn>>) =>

@@ -4,7 +4,13 @@ import { ActionGroup } from 'app/common/ActionGroup';
import { createEmptyActionSummary } from 'app/common/ActionSummary';
import { ServerQuery } from 'app/common/ActiveDocAPI';
import { ApiError } from 'app/common/ApiError';
import { AddRecord, BulkAddRecord, BulkColValues, BulkRemoveRecord, BulkUpdateRecord } from 'app/common/DocActions';
import {
AddRecord,
BulkAddRecord,
BulkColValues,
BulkRemoveRecord,
BulkUpdateRecord,
} from 'app/common/DocActions';
import { RemoveRecord, ReplaceTableData, UpdateRecord } from 'app/common/DocActions';
import { CellValue, ColValues, DocAction, getTableId, isSchemaAction } from 'app/common/DocActions';
import { TableDataAction, UserAction } from 'app/common/DocActions';

app/server/lib/Requests.ts (new file)
@@ -0,0 +1,128 @@
import {SandboxRequest} from 'app/common/ActionBundle';
import {ActiveDoc} from 'app/server/lib/ActiveDoc';
import {makeExceptionalDocSession} from 'app/server/lib/DocSession';
import {httpEncoding} from 'app/server/lib/httpEncoding';
import {HttpsProxyAgent} from 'https-proxy-agent';
import {HttpProxyAgent} from 'http-proxy-agent';
import fetch from 'node-fetch';
import * as path from 'path';
import * as tmp from 'tmp';
import chunk = require('lodash/chunk');
import fromPairs = require('lodash/fromPairs');
import zipObject = require('lodash/zipObject');
import * as fse from 'fs-extra';
import * as log from 'app/server/lib/log';
export class DocRequests {
// Request responses are briefly cached in files only to handle multiple requests in a formula
// and only as long as needed to finish calculating all formulas.
// When _numPending reaches 0 again, _cacheDir is deleted.
private _numPending: number = 0;
private _cacheDir: tmp.SynchrounousResult | null = null;
constructor(private readonly _activeDoc: ActiveDoc) {}
public async handleRequestsBatchFromUserActions(requests: Record<string, SandboxRequest>) {
const numRequests = Object.keys(requests).length;
this._numPending += numRequests;
try {
// Perform batches of requests in parallel for speed, and hope it doesn't cause rate limiting...
for (const keys of chunk(Object.keys(requests), 10)) {
const responses: Response[] = await Promise.all(keys.map(async key => {
const request = requests[key];
const response = await this.handleSingleRequestWithCache(key, request);
return {
...response,
// Tells the engine which cell(s) made the request and should be recalculated to use the response
deps: request.deps,
};
}));
// Tell the sandbox which previous responses we have cached in files.
// This lets it know it can immediately and synchronously get those responses again.
const cachedRequestKeys = await fse.readdir(this._cacheDir!.name);
// Recalculate formulas using this batch of responses.
const action = ["RespondToRequests", zipObject(keys, responses), cachedRequestKeys];
await this._activeDoc.applyUserActions(makeExceptionalDocSession("system"), [action]);
}
} finally {
this._numPending -= numRequests;
if (this._numPending === 0) {
log.debug(`Removing DocRequests._cacheDir: ${this._cacheDir!.name}`);
this._cacheDir!.removeCallback();
this._cacheDir = null;
}
}
}
public async handleSingleRequestWithCache(key: string, request: SandboxRequest): Promise<Response> {
if (!this._cacheDir) {
// Use the sync API because otherwise multiple requests being handled at the same time
// all reach this point, `await`, and create different dirs.
// `unsafeCleanup: true` means the directory can be deleted even if it's not empty, which is what we expect.
this._cacheDir = tmp.dirSync({unsafeCleanup: true});
log.debug(`Created DocRequests._cacheDir: ${this._cacheDir.name}`);
}
const cachePath = path.resolve(this._cacheDir.name, key);
try {
const result = await fse.readJSON(cachePath);
result.content = Buffer.from(result.content, "base64");
return result;
} catch {
const result = await this._handleSingleRequestRaw(request);
const resultForJson = {...result} as any;
if ('content' in result) {
resultForJson.content = result.content.toString("base64");
}
fse.writeJSON(cachePath, resultForJson).catch(e => log.warn(`Failed to save response to cache file: ${e}`));
return result;
}
}
private async _handleSingleRequestRaw(request: SandboxRequest): Promise<Response> {
try {
if (process.env.GRIST_EXPERIMENTAL_PLUGINS != '1') {
throw new Error("REQUEST is not enabled");
}
const {url, params, headers} = request;
const urlObj = new URL(url);
log.rawInfo("Handling sandbox request", {host: urlObj.host, docId: this._activeDoc.docName});
for (const [param, value] of Object.entries(params || {})) {
urlObj.searchParams.append(param, value);
}
const response = await fetch(urlObj.toString(), {headers: headers || {}, agent: proxyAgent(urlObj)});
const content = await response.buffer();
const {status, statusText} = response;
const encoding = httpEncoding(response.headers.get('content-type'), content);
return {
content, status, statusText, encoding,
headers: fromPairs([...response.headers]),
};
} catch (e) {
return {error: String(e)};
}
}
}
interface SuccessfulResponse {
content: Buffer;
status: number;
statusText: string;
encoding?: string;
headers: Record<string, string>;
}
interface RequestError {
error: string;
}
type Response = RequestError | SuccessfulResponse;
function proxyAgent(requestUrl: URL) {
const proxy = process.env.GRIST_HTTPS_PROXY;
if (!proxy) {
return undefined;
}
const ProxyAgent = requestUrl.protocol === "https:" ? HttpsProxyAgent : HttpProxyAgent;
return new ProxyAgent(proxy);
}

@@ -6,7 +6,7 @@ import {
LocalActionBundle,
UserActionBundle
} from 'app/common/ActionBundle';
import {DocAction, getNumRows, UserAction} from 'app/common/DocActions';
import {CALCULATING_USER_ACTIONS, DocAction, getNumRows, UserAction} from 'app/common/DocActions';
import {allToken} from 'app/common/sharing';
import * as log from 'app/server/lib/log';
import {LogMethods} from "app/server/lib/LogMethods";
@@ -215,8 +215,7 @@ export class Sharing {
try {
const isCalculate = (userActions.length === 1 &&
(userActions[0][0] === 'Calculate' || userActions[0][0] === 'UpdateCurrentTime'));
const isCalculate = (userActions.length === 1 && CALCULATING_USER_ACTIONS.has(userActions[0][0] as string));
// `internal` is true if users shouldn't be able to undo the actions. Applies to:
// - Calculate/UpdateCurrentTime because it's not considered as performed by a particular client.
// - Adding attachment metadata when uploading attachments,

@@ -0,0 +1,43 @@
// Based on the source code of the Body.textConverted method in node-fetch
export function httpEncoding(header: string | null, content: Buffer): string | undefined {
let res: RegExpExecArray | null = null;
// header
if (header) {
res = /charset=([^;]*)/i.exec(header);
}
// no charset in content type, peek at response body for at most 1024 bytes
const str = content.slice(0, 1024).toString();
// html5
if (!res && str) {
res = /<meta.+?charset=(['"])(.+?)\1/i.exec(str);
}
// html4
if (!res && str) {
res = /<meta\s+?http-equiv=(['"])content-type\1\s+?content=(['"])(.+?)\2/i.exec(str);
if (res) {
res = /charset=(.*)/i.exec(res.pop()!);
}
}
// xml
if (!res && str) {
res = /<\?xml.+?encoding=(['"])(.+?)\1/i.exec(str);
}
// found charset
if (res) {
let charset = res.pop();
// prevent decode issues when sites use incorrect encoding
// ref: https://hsivonen.fi/encoding-menu/
if (charset === 'gb2312' || charset === 'gbk') {
charset = 'gb18030';
}
return charset;
}
}

@@ -1,5 +1,6 @@
{
"compilerOptions": {
"allowSyntheticDefaultImports": true,
"target": "es2016",
"module": "commonjs",
"strict": true,

@@ -112,6 +112,8 @@
"grain-rpc": "0.1.7",
"grainjs": "1.0.2",
"highlight.js": "9.13.1",
"http-proxy-agent": "5.0.0",
"https-proxy-agent": "5.0.1",
"i18n-iso-countries": "6.1.0",
"image-size": "0.6.3",
"jquery": "2.2.1",

@@ -27,6 +27,7 @@ class ActionGroup(object):
self.undo = []
self.retValues = []
self.summary = ActionSummary()
self.requests = {}
def flush_calc_changes(self):
"""

@@ -30,6 +30,7 @@ import match_counter
import objtypes
from objtypes import strict_equal
from relation import SingleRowsIdentityRelation
import sandbox
import schema
from schema import RecalcWhen
import table as table_module
@@ -66,6 +67,16 @@ class OrderError(Exception):
self.requiring_node = node
self.requiring_row_id = row_id
class RequestingError(Exception):
"""
An exception thrown and handled internally, a bit like OrderError.
Indicates that the formula called the REQUEST function and needs to delegate an HTTP request
to the NodeJS server.
"""
pass
# An item of work to be done by Engine._update
WorkItem = namedtuple('WorkItem', ('node', 'row_ids', 'locks'))
@@ -186,6 +197,7 @@ class Engine(object):
# What's currently being computed
self._current_node = None
self._current_row_id = None
self._is_current_node_formula = False # True for formula columns, False for trigger formulas
# Certain recomputations are triggered by a particular doc action. This keeps track of it.
self._triggering_doc_action = None
@@ -236,6 +248,19 @@
self._table_stats = {"meta": [], "user": []}
#### Attributes used by the REQUEST function:
# True when the formula should synchronously call the exported JS method to make the request
# immediately instead of reevaluating the formula later. Used when reevaluating a single
# formula cell to get an error traceback.
self._sync_request = False
# dict of string keys to responses, set by the RespondToRequests user action to reevaluate
# formulas based on a batch of completed requests.
self._request_responses = {}
# set of string keys identifying requests that are currently cached in files and can thus
# be fetched synchronously via the exported JS method. This allows a single formula to
# make multiple different requests without needing to keep all the responses in memory.
self._cached_request_keys = set()
@property
def autocomplete_context(self):
# See the comment on _autocomplete_context in __init__ above.
@@ -480,7 +505,7 @@
if self._peeking:
return
if self._current_node:
if self._is_current_node_formula:
# Add an edge to indicate that the node being computed depends on the node passed in.
# Note that during evaluation, we only *add* dependencies. We *remove* them by clearing them
# whenever ALL rows for a node are invalidated (on schema changes and reloads).
@@ -671,6 +696,8 @@
table = self.tables[table_id]
col = table.get_column(col_id)
checkpoint = self._get_undo_checkpoint()
# Makes calls to REQUEST synchronous, since raising a RequestingError can't work here.
self._sync_request = True
try:
result = self._recompute_one_cell(table, col, row_id)
# If the error is gone for a trigger formula
@@ -686,6 +713,7 @@
# It is possible for formula evaluation to have side-effects that produce DocActions (e.g.
# lookupOrAddDerived() creates those). In case of get_formula_error(), these aren't fully
# processed (e.g. don't get applied to DocStorage), so it's important to reverse them.
self._sync_request = False
self._undo_to_checkpoint(checkpoint)
def _recompute(self, node, row_ids=None):
@ -757,9 +785,11 @@ class Engine(object):
require_rows = sorted(require_rows or [])
previous_current_node = self._current_node
previous_is_current_node_formula = self._is_current_node_formula
self._current_node = node
# Prevents dependency creation for non-formula nodes. A non-formula column may include a
# formula to eval for a newly-added record. Those shouldn't create dependencies.
self._current_node = node if col.is_formula() else None
self._is_current_node_formula = col.is_formula()
changes = None
cleaned = [] # this lists row_ids that can be removed from dirty_rows once we are no
@@ -789,11 +819,16 @@
# For common-case formulas, all cells in a column are likely to fail in the same way,
# so don't bother trying more from this column until we've reordered.
return
making_request = False
try:
# We figure out if we've hit a cycle here. If so, we just let _recompute_on_cell
# know, so it can set the cell value appropriately and do some other bookkeeping.
cycle = required and (node, row_id) in self._locked_cells
value = self._recompute_one_cell(table, col, row_id, cycle=cycle, node=node)
except RequestingError:
making_request = True
value = RequestingError
except OrderError as e:
if not required:
# We're out of order, but for a cell we were evaluating opportunistically.
@@ -822,19 +857,24 @@
if column.is_validation_column_name(col.col_id):
value = (value in (True, None))
# Convert the value, and if needed, set, and include into the returned action.
value = col.convert(value)
previous = col.raw_get(row_id)
if not strict_equal(value, previous):
if not changes:
changes = self._changes_map.setdefault(node, [])
changes.append((row_id, previous, value))
col.set(row_id, value)
# When the formula raises a RequestingError, leave the existing value in the cell.
# The formula will be evaluated again soon when we have a response.
if not making_request:
# Convert the value, and if needed, set, and include into the returned action.
value = col.convert(value)
previous = col.raw_get(row_id)
if not strict_equal(value, previous):
if not changes:
changes = self._changes_map.setdefault(node, [])
changes.append((row_id, previous, value))
col.set(row_id, value)
exclude.add(row_id)
cleaned.append(row_id)
self._recompute_done_counter += 1
finally:
self._current_node = previous_current_node
self._is_current_node_formula = previous_is_current_node_formula
# Usually dirty_rows refers to self.recompute_map[node], so this modifies both
dirty_rows -= cleaned
@@ -844,6 +884,43 @@
if not self.recompute_map[node]:
self.recompute_map.pop(node)
def _requesting(self, key, args):
"""
Called by the REQUEST function. If we don't have a response already and we can't
synchronously get it from the JS side, then note the request to be made in JS asynchronously
and raise RequestingError to indicate that the formula
should be evaluated again later when we have a response.
"""
# This will make the formula reevaluate periodically with the UpdateCurrentTime action.
# This assumes that the response changes with time and having the latest data is ideal.
# We will probably want to reconsider this to avoid making unwanted requests,
# along with avoiding refreshing the request when the doc is loaded with the Calculate action.
self.use_current_time()
if key in self._request_responses:
# This formula is being reevaluated in a RespondToRequests action, and the response is ready.
return self._request_responses[key]
elif self._sync_request or key in self._cached_request_keys:
# Not always ideal, but in this case the best strategy is to make the request immediately
# and block while waiting for a response.
return sandbox.call_external("request", key, args)
# We can't get a response to this request now. Note the request so it can be delegated.
table_id, column_id = self._current_node
(self.out_actions.requests # `out_actions.requests` is returned by apply_user_actions
# Here is where the request arguments are stored if they haven't been already
.setdefault(key, args)
# Meanwhile, the rest of this chain stores the cell that made the request so that it can be invalidated later
.setdefault("deps", {})
.setdefault(table_id, {})
.setdefault(column_id, [])
.append(self._current_row_id))
# As with OrderError, note the exception so it gets raised even if the formula catches it
self._cell_required_error = RequestingError()
raise RequestingError()
def _recompute_one_cell(self, table, col, row_id, cycle=False, node=None):
"""
Recomputes a single formula cell and returns a value.
@@ -1198,6 +1275,11 @@ class Engine(object):
self.out_actions = action_obj.ActionGroup()
self._user = User(user, self.tables) if user else None
# These should usually be empty, but may be populated by the RespondToRequests action.
self._request_responses = {}
self._cached_request_keys = set()
checkpoint = self._get_undo_checkpoint()
try:
for user_action in user_actions:
@@ -1252,6 +1334,8 @@
self.out_actions.flush_calc_changes()
self.out_actions.check_sanity()
self._user = None
self._request_responses = {}
self._cached_request_keys = set()
return self.out_actions
def acl_split(self, action_group):

@@ -3,10 +3,13 @@
from __future__ import absolute_import
import datetime
import hashlib
import json
import math
import numbers
import re
import chardet
import six
import column
@@ -656,3 +659,91 @@ def is_error(value):
return ((value is _error_sentinel)
or isinstance(value, AltText)
or (isinstance(value, float) and math.isnan(value)))
@unimplemented # exclude from autocomplete while in beta
def REQUEST(url, params=None, headers=None):
# Makes a GET HTTP request with an API similar to `requests.get`.
# Actually jumps through hoops internally to make the request asynchronously (usually)
# while feeling synchronous to the formula writer.
# Requests are identified by a string key in various places.
# The same arguments should produce the same key so the request is only made once.
args = dict(url=url, params=params, headers=headers)
args_json = json.dumps(args, sort_keys=True)
key = hashlib.sha256(args_json.encode()).hexdigest()
# This may either return the raw response data or it may raise a special exception
# to delegate the request and reevaluate the formula later.
response_dict = docmodel.global_docmodel._engine._requesting(key, args)
if "error" in response_dict:
# Indicates a complete failure to make the request, such as a connection problem.
# An unsuccessful status code like 404 or 500 doesn't raise this error.
raise HTTPError(response_dict["error"])
return Response(**response_dict)
class HTTPError(Exception):
pass
class Response(object):
"""
Similar to the Response class from the `requests` library.
"""
def __init__(self, content, status, statusText, headers, encoding):
self.content = content # raw bytes
self.status_code = status # e.g. 404
self.reason = statusText # e.g. "Not Found"
self.headers = CaseInsensitiveDict(headers)
self.encoding = encoding or self.apparent_encoding
@property
def text(self):
return self.content.decode(self.encoding)
def json(self, **kwargs):
return json.loads(self.text, **kwargs)
@property
def ok(self):
return self.status_code < 400
def raise_for_status(self):
if not self.ok:
raise HTTPError("Request failed with status %s" % self.status_code)
@property
def apparent_encoding(self):
return chardet.detect(self.content)["encoding"]
def close(self):
pass # nothing to do
class CaseInsensitiveDict(dict):
"""
Similar to dict but treats all keys (which must be strings) case-insensitively,
e.g. `d["foo"]` and `d["FOO"]` are equivalent.
"""
def __init__(self, *args, **kwargs):
dict.__init__(self, *args, **kwargs)
for k in list(self):
# Convert key to lowercase
self[k] = dict.pop(self, k)
def update(self, E=None, **F):
dict.update(self, self.__class__(E or {}))
dict.update(self, self.__class__(**F))
def _forward_dict_method(name):
# Replace method 'name' where the first argument is a key with a version that lowercases the key
def method(self, key, *args, **kwargs):
return getattr(dict, name)(self, key.lower(), *args, **kwargs)
return method
for _name in "__getitem__ __setitem__ __delitem__ __contains__ get setdefault pop has_key".split():
setattr(CaseInsensitiveDict, _name, _forward_dict_method(_name))

@@ -126,7 +126,7 @@ class BaseLookupMapColumn(column.BaseColumn):
"""
key = tuple(_extract(val) for val in key)
engine = self._engine
if engine._current_node:
if engine._is_current_node_formula:
rel = self._get_relation(engine._current_node)
rel._add_lookup(engine._current_row_id, key)
else:

@@ -68,10 +68,13 @@ def run(sandbox):
@export
def apply_user_actions(action_reprs, user=None):
action_group = eng.apply_user_actions([useractions.from_repr(u) for u in action_reprs], user)
return dict(
result = dict(
rowCount=eng.count_rows(),
**eng.acl_split(action_group).to_json_obj()
)
if action_group.requests:
result["requests"] = action_group.requests
return result
@export
def fetch_table(table_id, formulas=True, query=None):

@@ -0,0 +1,136 @@
# coding=utf-8
import unittest
import test_engine
import testutil
from functions import CaseInsensitiveDict, Response, HTTPError
class TestCaseInsensitiveDict(unittest.TestCase):
def test_case_insensitive_dict(self):
d = CaseInsensitiveDict({"FOO": 1})
for key in ["foo", "FOO", "Foo"]:
self.assertEqual(d, {"foo": 1})
self.assertEqual(list(d), ["foo"])
self.assertEqual(d, CaseInsensitiveDict({key: 1}))
self.assertIn(key, d)
self.assertEqual(d[key], 1)
self.assertEqual(d.get(key), 1)
self.assertEqual(d.get(key, 2), 1)
self.assertEqual(d.get(key + "2", 2), 2)
self.assertEqual(d.pop(key), 1)
self.assertEqual(d, {})
self.assertEqual(d.setdefault(key, 3), 3)
self.assertEqual(d, {"foo": 3})
self.assertEqual(d.setdefault(key, 4), 3)
self.assertEqual(d, {"foo": 3})
del d[key]
self.assertEqual(d, {})
d[key] = 1
class TestResponse(unittest.TestCase):
def test_ok_response(self):
r = Response(b"foo", 200, "OK", {"X-header": "hi"}, None)
self.assertEqual(r.content, b"foo")
self.assertEqual(r.text, u"foo")
self.assertEqual(r.status_code, 200)
self.assertEqual(r.ok, True)
self.assertEqual(r.reason, "OK")
self.assertEqual(r.headers, {"x-header": "hi"})
self.assertEqual(r.encoding, "ascii")
self.assertEqual(r.apparent_encoding, "ascii")
r.raise_for_status()
r.close()
def test_error_response(self):
r = Response(b"foo", 500, "Server error", {}, None)
self.assertEqual(r.status_code, 500)
self.assertEqual(r.ok, False)
self.assertEqual(r.reason, "Server error")
with self.assertRaises(HTTPError) as cm:
r.raise_for_status()
self.assertEqual(str(cm.exception), "Request failed with status 500")
def test_json(self):
r = Response(b'{"foo": "bar"}', 200, "OK", {}, None)
self.assertEqual(r.json(), {"foo": "bar"})
def test_encoding_direct(self):
r = Response(b"foo", 200, "OK", {}, "some encoding")
self.assertEqual(r.encoding, "some encoding")
self.assertEqual(r.apparent_encoding, "ascii")
def test_apparent_encoding(self):
text = u"编程"
encoding = "utf-8"
content = text.encode(encoding)
self.assertEqual(content.decode(encoding), text)
r = Response(content, 200, "OK", {}, "")
self.assertEqual(r.encoding, encoding)
self.assertEqual(r.apparent_encoding, encoding)
self.assertEqual(r.content, content)
self.assertEqual(r.text, text)
class TestRequestFunction(test_engine.EngineTestCase):
sample = testutil.parse_test_sample({
"SCHEMA": [
[1, "Table1", [
[2, "Request", "Any", True, "$id", "", ""],
[3, "Other", "Any", True, "", "", ""],
]],
],
"DATA": {
"Table1": [
["id"],
[1],
[2],
],
}
})
def test_request_function(self):
self.load_sample(self.sample)
formula = """
r = REQUEST('my_url', headers={'foo': 'bar'}, params={'b': 1, 'a': 2})
r.__dict__
"""
out_actions = self.modify_column("Table1", "Request", formula=formula)
key = '9d305be9664924aaaf7ebb0bab2e4155d1fa1b9dcde53e417f1a9f9a2c7e09b9'
deps = {'Table1': {'Request': [1, 2]}}
args = {
'url': 'my_url',
'headers': {'foo': 'bar'},
'params': {'a': 2, 'b': 1},
'deps': deps,
}
self.assertEqual(out_actions.requests, {key: args})
self.assertTableData("Table1", cols="subset", data=[
["id", "Request"],
[1, 1],
[2, 2],
])
response = {
'status': 200,
'statusText': 'OK',
'content': b'body',
'headers': {'h1': 'h2'},
'encoding': 'utf16',
'deps': deps,
}
self.apply_user_action(["RespondToRequests", {key: response.copy()}, [key]])
# Translate names from JS `fetch` API to Python `requests`-style API
response["status_code"] = response.pop("status")
response["reason"] = response.pop("statusText")
# This is sent in the user action but not kept for the response object
del response["deps"]
self.assertTableData("Table1", cols="subset", data=[
["id", "Request"],
[1, response],
[2, response],
])

@@ -9,6 +9,7 @@ import six
from six.moves import xrange
import acl
import depend
import gencode
from acl_formula import parse_acl_formula_json
import actions
@@ -331,6 +332,25 @@ class UserActions(object):
"""
self._engine.update_current_time()
@useraction
def RespondToRequests(self, responses, cached_keys):
"""
Reevaluate formulas which called the REQUEST function using the now available responses.
"""
engine = self._engine
# The actual raw responses which will be returned to the REQUEST function
engine._request_responses = responses
# Keys for older requests which are stored in files and can be retrieved synchronously
engine._cached_request_keys = set(cached_keys)
# Invalidate the exact cells which made the exact requests which are being responded to here.
for response in six.itervalues(responses):
for table_id, table_deps in six.iteritems(response.pop("deps")):
for col_id, row_ids in six.iteritems(table_deps):
node = depend.Node(table_id, col_id)
engine.dep_graph.invalidate_deps(node, row_ids, engine.recompute_map)
#----------------------------------------
# User actions on records.
#----------------------------------------

@@ -179,6 +179,11 @@
resolved "https://registry.yarnpkg.com/@tokenizer/token/-/token-0.1.1.tgz#f0d92c12f87079ddfd1b29f614758b9696bc29e3"
integrity sha512-XO6INPbZCxdprl+9qa/AAbFFOMzzwqYxpjPgLICrMD6C2FCw6qfJOPcBk6JqqPLSaZ/Qx87qn4rpPmPMwaAK6w==
"@tootallnate/once@2":
version "2.0.0"
resolved "https://registry.yarnpkg.com/@tootallnate/once/-/once-2.0.0.tgz#f544a148d3ab35801c1f633a7441fd87c2e484bf"
integrity sha512-XCuKFP5PS55gnMVu3dty8KPatLqUoy/ZYzDzAGCQ8JNFCkLXzmI7vNHCR+XpbZaMWQK/vQubr7PkYq8g470J/A==
"@types/accept-language-parser@1.5.2":
version "1.5.2"
resolved "https://registry.yarnpkg.com/@types/accept-language-parser/-/accept-language-parser-1.5.2.tgz#ea48ed07a3dc9d2ba6666d45c018ad1b5e59d665"
@@ -3647,6 +3652,15 @@ http-errors@~1.8.0:
statuses ">= 1.5.0 < 2"
toidentifier "1.0.0"
http-proxy-agent@5.0.0:
version "5.0.0"
resolved "https://registry.yarnpkg.com/http-proxy-agent/-/http-proxy-agent-5.0.0.tgz#5129800203520d434f142bc78ff3c170800f2b43"
integrity sha512-n2hY8YdoRE1i7r6M0w9DIw5GgZN0G25P8zLCRQ8rjXtTU3vsNFBI/vWK/UIeE6g5MUUz6avwAPXmL6Fy9D/90w==
dependencies:
"@tootallnate/once" "2"
agent-base "6"
debug "4"
http-signature@~1.2.0:
version "1.2.0"
resolved "https://registry.yarnpkg.com/http-signature/-/http-signature-1.2.0.tgz#9aecd925114772f3d95b65a60abb8f7c18fbace1"
@@ -3661,6 +3675,14 @@ https-browserify@^1.0.0:
resolved "https://registry.yarnpkg.com/https-browserify/-/https-browserify-1.0.0.tgz#ec06c10e0a34c0f2faf199f7fd7fc78fffd03c73"
integrity sha1-7AbBDgo0wPL68Zn3/X/Hj//QPHM=
https-proxy-agent@5.0.1:
version "5.0.1"
resolved "https://registry.yarnpkg.com/https-proxy-agent/-/https-proxy-agent-5.0.1.tgz#c59ef224a04fe8b754f3db0063a25ea30d0005d6"
integrity sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA==
dependencies:
agent-base "6"
debug "4"
https-proxy-agent@5.0.0, https-proxy-agent@^5.0.0:
version "5.0.0"
resolved "https://registry.yarnpkg.com/https-proxy-agent/-/https-proxy-agent-5.0.0.tgz#e2a90542abb68a762e0a0850f6c9edadfd8506b2"