(core) Allow using negative rowIds to add records and refer to them in Reference values.

Summary:
- When adding records, negative rowIds may now be specified. They'll be replaced by proper IDs.
- If these negative IDs are used in Reference columns in subsequent actions in
  the same bundle of UserActions, they get replaced with the proper rowIds
  (see the sketch after this list).
- Use this to sync ACLResources and ACLRules from the UI in a single batch of UserActions.
- Resolve the TODOs in the GranularAccess test, so it no longer needs to guess resource rowIds.
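
For example, a single bundle can create a record under a temporary negative rowId and reference
it from a later action; the engine substitutes the real rowId in both places. A minimal sketch
(table and column names are illustrative, borrowed from the test sample below):

  # Sketch only: one bundle of UserActions using a temporary negative rowId.
  user_actions = [
    ['AddRecord', 'Address', -1, {'city': 'New Haven'}],              # -1 is a temporary rowId
    ['AddRecord', 'Schools', None, {'name': 'Yale', 'address': -1}],  # -1 resolves to the real Address rowId
  ]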

Test Plan: Added a python unittest for mapping IDs; updated browser tests.

Reviewers: paulfitz

Reviewed By: paulfitz

Differential Revision: https://phab.getgrist.com/D2691
Dmitry S, 2020-12-15 14:37:55 -05:00
commit 9f806de64b (parent b2fabb0ebc)
6 changed files with 219 additions and 97 deletions


@@ -110,88 +110,88 @@ export class AccessRules extends Disposable {
    // Note that if anything has changed, we apply changes relative to the current state of the
    // ACL tables (they may have changed by other users). So our changes will win.
    // TODO: There is a race condition if two people save different rules at the same time, since
    // it's a two-step operation (syncing resources and rules).
    const docData = this._gristDoc.docData;
    const resourcesTable = docData.getTable('_grist_ACLResources')!;
    const rulesTable = docData.getTable('_grist_ACLRules')!;
-   await docData.bundleActions(null, async () => {
-     // Add/remove resources to have just the ones we need.
-     const newResources: RowRecord[] = flatten(
-       [{tableId: '*', colIds: '*'}], ...this._tableRules.get().map(t => t.getResources()))
-       .map(r => ({id: -1, ...r}));
-     const newResourceMap = await syncRecords(resourcesTable, newResources, serializeResource);
-     // For syncing rules, we'll go by rowId that we store with each RulePart and with the RuleSet.
-     const newRules: RowRecord[] = [];
-     for (const rule of this.getRules()) {
-       // We use id of 0 internally to mark built-in rules. Skip those.
-       if (rule.id === 0) {
-         continue;
-       }
-       // Look up the rowId for the resource.
-       const resourceKey = serializeResource(rule.resourceRec as RowRecord);
-       const resourceRowId = newResourceMap.get(resourceKey);
-       if (!resourceRowId) {
-         throw new Error(`Resource missing in resource map: ${resourceKey}`);
-       }
-       newRules.push({
-         id: rule.id || -1,
-         resource: resourceRowId,
-         aclFormula: rule.aclFormula!,
-         permissionsText: rule.permissionsText!,
-         rulePos: rule.rulePos || null,
-       });
-     }
-     // UserAttribute rules are listed in the same rulesTable.
-     const defaultResourceRowId = newResourceMap.get(serializeResource({id: -1, tableId: '*', colIds: '*'}));
-     if (!defaultResourceRowId) {
-       throw new Error('Default resource missing in resource map');
-     }
-     for (const userAttr of this._userAttrRules.get()) {
-       const rule = userAttr.getRule();
-       newRules.push({
-         id: rule.id || -1,
-         resource: defaultResourceRowId,
-         rulePos: rule.rulePos || null,
-         userAttributes: rule.userAttributes,
-       });
-     }
-     // We need to fill in rulePos values. We'll add them in the order the rules are listed (since
-     // this.getRules() returns them in a suitable order), keeping rulePos unchanged when possible.
-     let lastGoodRulePos = 0;
-     let lastGoodIndex = -1;
-     for (let i = 0; i < newRules.length; i++) {
-       const pos = newRules[i].rulePos as number;
-       if (pos && pos > lastGoodRulePos) {
-         const step = (pos - lastGoodRulePos) / (i - lastGoodIndex);
-         for (let k = lastGoodIndex + 1; k < i; k++) {
-           newRules[k].rulePos = step * (k - lastGoodIndex);
-         }
-         lastGoodRulePos = pos;
-         lastGoodIndex = i;
-       }
-     }
-     // Fill in the rulePos values for the remaining rules.
-     for (let k = lastGoodIndex + 1; k < newRules.length; k++) {
-       newRules[k].rulePos = ++lastGoodRulePos;
-     }
-     // Finally we can sync the records.
-     await syncRecords(rulesTable, newRules);
-   }).catch(e => {
-     // Report the error, but go on to update the rules. The user may lose their entries, but
-     // will see what's in the document. To preserve entries and show what's wrong, we try to
-     // catch errors earlier.
-     reportError(e);
-   });
+   // Add/remove resources to have just the ones we need.
+   const newResources: RowRecord[] = flatten(
+     [{tableId: '*', colIds: '*'}], ...this._tableRules.get().map(t => t.getResources()))
+     .map(r => ({id: -1, ...r}));
+   // Prepare userActions and a mapping of serializedResource to rowIds.
+   const resourceSync = syncRecords(resourcesTable, newResources, serializeResource);
+   // For syncing rules, we'll go by rowId that we store with each RulePart and with the RuleSet.
+   // New rules will get temporary negative rowIds.
+   let nextId: number = -1;
+   const newRules: RowRecord[] = [];
+   for (const rule of this.getRules()) {
+     // We use id of 0 internally to mark built-in rules. Skip those.
+     if (rule.id === 0) {
+       continue;
+     }
+     // Look up the rowId for the resource.
+     const resourceKey = serializeResource(rule.resourceRec as RowRecord);
+     const resourceRowId = resourceSync.rowIdMap.get(resourceKey);
+     if (!resourceRowId) {
+       throw new Error(`Resource missing in resource map: ${resourceKey}`);
+     }
+     newRules.push({
+       id: rule.id || (nextId--),
+       resource: resourceRowId,
+       aclFormula: rule.aclFormula!,
+       permissionsText: rule.permissionsText!,
+       rulePos: rule.rulePos || null,
+     });
+   }
+   // UserAttribute rules are listed in the same rulesTable.
+   const defaultResourceRowId = resourceSync.rowIdMap.get(serializeResource({id: -1, tableId: '*', colIds: '*'}));
+   if (!defaultResourceRowId) {
+     throw new Error('Default resource missing in resource map');
+   }
+   for (const userAttr of this._userAttrRules.get()) {
+     const rule = userAttr.getRule();
+     newRules.push({
+       id: rule.id || (nextId--),
+       resource: defaultResourceRowId,
+       rulePos: rule.rulePos || null,
+       userAttributes: rule.userAttributes,
+     });
+   }
+   // We need to fill in rulePos values. We'll add them in the order the rules are listed (since
+   // this.getRules() returns them in a suitable order), keeping rulePos unchanged when possible.
+   let lastGoodRulePos = 0;
+   let lastGoodIndex = -1;
+   for (let i = 0; i < newRules.length; i++) {
+     const pos = newRules[i].rulePos as number;
+     if (pos && pos > lastGoodRulePos) {
+       const step = (pos - lastGoodRulePos) / (i - lastGoodIndex);
+       for (let k = lastGoodIndex + 1; k < i; k++) {
+         newRules[k].rulePos = step * (k - lastGoodIndex);
+       }
+       lastGoodRulePos = pos;
+       lastGoodIndex = i;
+     }
+   }
+   // Fill in the rulePos values for the remaining rules.
+   for (let k = lastGoodIndex + 1; k < newRules.length; k++) {
+     newRules[k].rulePos = ++lastGoodRulePos;
+   }
+   // Prepare the UserActions for syncing the Rules table.
+   const rulesSync = syncRecords(rulesTable, newRules);
+   // Finally collect and apply all the actions together.
+   try {
+     await docData.sendActions([...resourceSync.userActions, ...rulesSync.userActions]);
+   } catch (e) {
+     // Report the error, but go on to update the rules. The user may lose their entries, but
+     // will see what's in the document. To preserve entries and show what's wrong, we try to
+     // catch errors earlier.
+     reportError(e);
+   }
    // Re-populate the state from DocData once the records are synced.
    await this.update();
@@ -721,22 +721,30 @@ class ObsRulePart extends Disposable {

/**
- * Produce and apply UserActions to create/update/remove records, to replace data in tableData
- * with newRecords. Records are matched on uniqueId(record), which defaults to returning record.id
- * (unique negative IDs may be used for new records). The returned Map maps uniqueId(record) to
- * rowId for all existing and newly added records.
+ * Produce UserActions to create/update/remove records, to replace data in tableData
+ * with newRecords. Records are matched on uniqueId(record), which defaults to returning
+ * String(record.id). UniqueIds of new records don't need to be unique as long as they don't
+ * overlap with uniqueIds of existing records.
+ *
+ * Return also a rowIdMap, mapping uniqueId(record) to a rowId used in the actions. The rowIds may
+ * include negative values (auto-generated when newRecords doesn't include one). These may be used
+ * in Reference values within the same action bundle.
 *
 * TODO This is a general-purpose function, and should live in a separate module.
 */
-async function syncRecords(tableData: TableData, newRecords: RowRecord[],
-                           uniqueId: (r: RowRecord) => string = (r => String(r.id))
-): Promise<Map<string, number>> {
+function syncRecords(tableData: TableData, newRecords: RowRecord[],
+                     uniqueId: (r: RowRecord) => string = (r => String(r.id))
+): {userActions: UserAction[], rowIdMap: Map<string, number>} {
  const oldRecords = tableData.getRecords();
- const oldRecordMap = new Map<string, RowRecord>(oldRecords.map(r => [uniqueId(r), r]));
+ const rowIdMap = new Map<string, number>(oldRecords.map(r => [uniqueId(r), r.id]));
  const newRecordMap = new Map<string, RowRecord>(newRecords.map(r => [uniqueId(r), r]));
  const removedRecords: RowRecord[] = oldRecords.filter(r => !newRecordMap.has(uniqueId(r)));
- const addedRecords: RowRecord[] = newRecords.filter(r => !oldRecordMap.has(uniqueId(r)));
+ // Generate a unique negative rowId for each added record.
+ const addedRecords: RowRecord[] = newRecords.filter(r => !rowIdMap.has(uniqueId(r)))
+   .map((r, index) => ({...r, id: -(index + 1)}));
  // Array of [before, after] pairs for changed records.
  const updatedRecords: Array<[RowRecord, RowRecord]> = oldRecords.map((r): ([RowRecord, RowRecord]|null) => {
    const newRec = newRecordMap.get(uniqueId(r));

@@ -749,28 +757,21 @@ async function syncRecords(tableData: TableData, newRecords: RowRecord[],
              addedRecords.map(uniqueId).join(", "),
              updatedRecords.map(([r]) => uniqueId(r)).join(", "));
+ const tableId = tableData.tableId;
  const userActions: UserAction[] = [];
  if (removedRecords.length > 0) {
-   userActions.push(['BulkRemoveRecord', removedRecords.map(r => r.id)]);
+   userActions.push(['BulkRemoveRecord', tableId, removedRecords.map(r => r.id)]);
  }
  if (updatedRecords.length > 0) {
-   userActions.push(['BulkUpdateRecord', updatedRecords.map(([r]) => r.id), getColChanges(updatedRecords)]);
+   userActions.push(['BulkUpdateRecord', tableId, updatedRecords.map(([r]) => r.id), getColChanges(updatedRecords)]);
  }
- let addActionIndex: number = -1;
  if (addedRecords.length > 0) {
-   addActionIndex = userActions.length;
-   userActions.push(['BulkAddRecord', addedRecords.map(r => null), getColValues(addedRecords)]);
+   userActions.push(['BulkAddRecord', tableId, addedRecords.map(r => r.id), getColValues(addedRecords)]);
  }
- const rowIdMap = new Map<string, number>();
- oldRecords.forEach((r) => rowIdMap.set(uniqueId(r), r.id));
- if (userActions.length > 0) {
-   const results = await tableData.sendTableActions(userActions);
-   const newRowIds = results[addActionIndex];
-   addedRecords.forEach((r, i) => rowIdMap.set(uniqueId(r), newRowIds[i]));
- }
- return rowIdMap;
+ // Include generated rowIds for added records into the returned map.
+ addedRecords.forEach(r => rowIdMap.set(uniqueId(r), r.id));
+ return {userActions, rowIdMap};
}
/**

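For reference, the same record-diffing idea sketched as standalone Python (hypothetical helper,
not part of this commit; updates are omitted for brevity): match old and new records by a key,
emit the removed ids and the added records, and hand out temporary negative rowIds for the
additions. Records here are plain dicts with an 'id' key.

  # Standalone sketch of the syncRecords approach above (hypothetical names).
  def sync_records(old_records, new_records, unique_id=lambda r: str(r['id'])):
    row_id_map = {unique_id(r): r['id'] for r in old_records}   # existing rows keep their rowIds
    new_keys = {unique_id(r) for r in new_records}
    removed_ids = [r['id'] for r in old_records if unique_id(r) not in new_keys]
    # Assign a unique temporary negative rowId to each genuinely new record.
    added_records = [dict(r, id=-(i + 1)) for i, r in
                     enumerate(r for r in new_records if unique_id(r) not in row_id_map)]
    for r in added_records:
      row_id_map[unique_id(r)] = r['id']
    return removed_ids, added_records, row_id_map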

@@ -52,6 +52,23 @@ class ActionSummary(object):
    col_delta = table_delta and table_delta.column_deltas.pop(col_id, None)
    return self._changes_to_actions(table_id, col_id, col_delta or {}, out_stored, out_undo)

+  def update_new_rows_map(self, table_id, temp_row_ids, final_row_ids):
+    """
+    Add a mapping from temporary negative row_ids to the final ones, for rows added to the given
+    table. The two lists must have the same length; only negative row_ids are remembered. If a
+    negative row_id was already used, its mapping will be overridden.
+    """
+    t = self._forTable(table_id)
+    t.temp_row_ids.update((a, b) for (a, b) in zip(temp_row_ids, final_row_ids) if a and a < 0)
+
+  def translate_new_row_ids(self, table_id, row_ids):
+    """
+    Translate any temporary (negative) row_ids to their final values, using mappings created by
+    update_new_rows_map().
+    """
+    t = self._forTable(table_id)
+    return [t.temp_row_ids.get(r, r) for r in row_ids]
+
  def _changes_to_actions(self, table_id, col_id, column_delta, out_stored, out_undo):
    """
    Given a column and a dict of column_deltas for it, of the form {row_id: (before_value,

@@ -155,6 +172,10 @@ class TableDelta(object):
    self.column_renames = LabelRenames()
    self.column_deltas = {}  # maps col_id to the dict {row_id: (before_value, after_value)}
+
+   # Map of negative row_ids that may be used in [Bulk]AddRecord actions to the final row_ids for
+   # those rows; to allow translating Reference values added in the same action bundle.
+   self.temp_row_ids = {}

class LabelRenames(object):
  """


@@ -201,7 +201,7 @@ class BaseColumn(object):
    """
    return self.type_obj.convert(value_to_convert)

-  def prepare_new_values(self, values, ignore_data=False):
+  def prepare_new_values(self, values, ignore_data=False, action_summary=None):
    """
    This allows us to modify values and also produce adjustments to existing records. This
    currently is only used by PositionColumn. Returns two lists: new_values, and

@@ -281,7 +281,7 @@ class PositionColumn(NumericColumn):
    super(PositionColumn, self).copy_from_column(other_column)
    self._sorted_rows = SortedListWithKey(other_column._sorted_rows[:], key=self.raw_get)

-  def prepare_new_values(self, values, ignore_data=False):
+  def prepare_new_values(self, values, ignore_data=False, action_summary=None):
    # This does the work of adjusting positions and relabeling existing rows with new position
    # (without changing sort order) to make space for the new positions. Note that this is also
    # used for updating a position for an existing row: we'll find a new value for it; later when

@@ -356,6 +356,11 @@ class ReferenceColumn(BaseReferenceColumn):
    if new_value:
      self._relation.add_reference(row_id, new_value)

+  def prepare_new_values(self, values, ignore_data=False, action_summary=None):
+    if action_summary and values:
+      values = action_summary.translate_new_row_ids(self._target_table.table_id, values)
+    return values, []
+

class ReferenceListColumn(BaseReferenceColumn):
  """


@@ -862,7 +862,8 @@ class Engine(object):
      # If there are values for any PositionNumber columns, ensure PositionNumbers are ordered as
      # intended but are all unique, which may require updating other positions.
-     nvalues, adjustments = col_obj.prepare_new_values(values)
+     nvalues, adjustments = col_obj.prepare_new_values(values,
+                                                       action_summary=self.out_actions.summary)
      if adjustments:
        extra_actions.append(actions.BulkUpdateRecord(
          action.table_id, [r for r,v in adjustments], {col_id: [v for r,v in adjustments]}))

@@ -880,7 +881,8 @@
      defaults = [col_obj.getdefault() for r in row_ids]
      # We use defaults to get new values or adjustments. If we are replacing data, we'll make
      # the adjustments without regard to the existing data.
-     nvalues, adjustments = col_obj.prepare_new_values(defaults, ignore_data=ignore_data)
+     nvalues, adjustments = col_obj.prepare_new_values(defaults, ignore_data=ignore_data,
+                                                       action_summary=self.out_actions.summary)
      if adjustments:
        extra_actions.append(actions.BulkUpdateRecord(
          action.table_id, [r for r,v in adjustments], {col_id: [v for r,v in adjustments]}))


@@ -0,0 +1,88 @@
import test_engine
import testsamples
import useractions


class TestTempRowIds(test_engine.EngineTestCase):

  def test_temp_row_ids(self):
    self.load_sample(testsamples.sample_students)

    out_actions = self.engine.apply_user_actions([useractions.from_repr(ua) for ua in (
      # Add a mix of records with or without temp rowIds.
      ['AddRecord', 'Address', None, {'city': 'A'}],
      ['AddRecord', 'Address', -1, {'city': 'B'}],
      ['BulkAddRecord', 'Address', [-3, None, -7, -10], {'city': ['C', 'D', 'E', 'F']}],

      # -3 translates to C; the new record of -1 applies to a different table, so doesn't affect
      # its translation to city A.
      ['AddRecord', 'Schools', -1, {'address': -3, 'name': 'SUNY C'}],

      # Add a mix of records referring to new, existing, or null rows.
      ['BulkAddRecord', 'Schools', [None, None, None, None, None], {
        'address': [-1, 11, 0, -3, -7],
        'name': ['SUNY A', 'NYU', 'Xavier', 'Suny C2', 'Suny E'],
        }
      ],

      # Try a few updates too.
      ['UpdateRecord', 'Schools', 1, {'address': -7}],
      ['BulkUpdateRecord', 'Schools', [2, 3, 4], {'address': [-3, -1, 11]}],

      # Later temp rowIds override previous one. Here, -3 was already used.
      ['AddRecord', 'Address', -3, {'city': 'G'}],
      ['AddRecord', 'Schools', None, {'address': -3, 'name': 'SUNY G'}],
    )])

    # Test that we get the correct resulting data.
    self.assertTableData('Address', cols="subset", data=[
      ["id", "city"],
      [11, "New York"],
      [12, "Colombia"],
      [13, "New Haven"],
      [14, "West Haven"],
      [15, "A"],
      [16, "B"],    # was -1
      [17, "C"],    # was -3
      [18, "D"],
      [19, "E"],    # was -7
      [20, "F"],    # was -10
      [21, "G"],    # was -3
    ])

    self.assertTableData('Schools', cols="subset", data=[
      ["id", "name", "address"],
      [1, "Columbia", 19],
      [2, "Columbia", 17],
      [3, "Yale", 16],
      [4, "Yale", 11],
      [5, "SUNY C", 17],
      [6, "SUNY A", 16],
      [7, "NYU", 11],
      [8, "Xavier", 0],
      [9, "Suny C2", 17],
      [10, "Suny E", 19],
      [11, "SUNY G", 21],
    ])

    # Test that the actions above got properly translated.
    # These are same as above, except for the translated rowIds.
    self.assertPartialOutActions(out_actions, {
      "stored": [
        ['AddRecord', 'Address', 15, {'city': 'A'}],
        ['AddRecord', 'Address', 16, {'city': 'B'}],
        ['BulkAddRecord', 'Address', [17, 18, 19, 20], {'city': ['C', 'D', 'E', 'F']}],
        ['AddRecord', 'Schools', 5, {'address': 17, 'name': 'SUNY C'}],
        ['BulkAddRecord', 'Schools', [6, 7, 8, 9, 10], {
          'address': [16, 11, 0, 17, 19],
          'name': ['SUNY A', 'NYU', 'Xavier', 'Suny C2', 'Suny E'],
          }
        ],
        ['UpdateRecord', 'Schools', 1, {'address': 19}],
        ['BulkUpdateRecord', 'Schools', [2, 3, 4], {'address': [17, 16, 11]}],
        ['AddRecord', 'Address', 21, {'city': 'G'}],
        ['AddRecord', 'Schools', 11, {'address': 21, 'name': 'SUNY G'}],
        # Calculated values (for Students; lookups on schools named "Columbia" and "Yale")
        ["BulkUpdateRecord", "Students", [1, 2, 3, 4, 6], {
          "schoolCities": ["E:C", "B:New York", "E:C", "B:New York", "B:New York"]}],
      ]
    })


@@ -294,10 +294,15 @@ class UserActions(object):
    # Make a copy of row_ids and fill in those set to None.
    filled_row_ids = row_ids[:]
    for i, row_id in enumerate(filled_row_ids):
-     if row_id is None:
+     if row_id is None or row_id < 0:
        filled_row_ids[i] = row_id = next_row_id
      next_row_id = max(next_row_id, row_id) + 1

+   # Whenever we add new rows, remember the mapping from any negative row_ids to their final
+   # values. This allows the negative row_ids to be used as Reference values in subsequent
+   # actions in the same bundle.
+   self._engine.out_actions.summary.update_new_rows_map(table_id, row_ids, filled_row_ids)
+
    # Convert entered values to the correct types.
    ActionType = actions.ReplaceTableData if replace else actions.BulkAddRecord
    action, extra_actions = self._engine.convert_action_values(
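
A standalone sketch of the fill-in step changed above (hypothetical helper name): None and
negative rowIds both get fresh sequential ids, while explicit positive ids are kept and advance
the counter.

  def fill_row_ids(row_ids, next_row_id):
    filled = list(row_ids)
    for i, row_id in enumerate(filled):
      if row_id is None or row_id < 0:
        filled[i] = row_id = next_row_id
      next_row_id = max(next_row_id, row_id) + 1
    return filled

  # With 14 rows already in the table, the next free rowId is 15:
  assert fill_row_ids([-3, None, -7, -10], 15) == [15, 16, 17, 18]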