Mirror of https://github.com/gristlabs/grist-core.git, synced 2024-10-27 20:44:07 +00:00.
Create user last connection datetime (#935)

Each time a Grist page is reloaded, the user's `last_connection_at` is updated (at most once per calendar day). Resolves [#924](https://github.com/gristlabs/grist-core/issues/924).
Commit 61421e8251, parent 0e777b1fcf.
app/gen-server/entity/User.ts:
@@ -29,6 +29,9 @@ export class User extends BaseEntity {
   @Column({name: 'first_login_at', type: Date, nullable: true})
   public firstLoginAt: Date | null;
 
+  @Column({name: 'last_connection_at', type: Date, nullable: true})
+  public lastConnectionAt: Date | null;
+
   @OneToOne(type => Organization, organization => organization.owner)
   public personalOrg: Organization;
 
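
The new field mirrors `firstLoginAt`: a nullable Date kept at second precision. A hypothetical read through a TypeORM EntityManager (the `manager` and `someUserId` names are assumptions for illustration, not part of this diff):

    import {EntityManager} from 'typeorm';
    import {User} from 'app/gen-server/entity/User';

    async function readLastConnection(manager: EntityManager, someUserId: number): Promise<Date|null> {
      const user = await manager.findOne(User, {where: {id: someUserId}});
      // Null until the user loads a Grist page after this migration has run.
      return user?.lastConnectionAt ?? null;
    }
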
@@ -395,14 +395,6 @@ export class UsersManager {
       user.name = (profile && (profile.name || email.split('@')[0])) || '';
       needUpdate = true;
     }
-    if (profile && !user.firstLoginAt) {
-      // set first login time to now (remove milliseconds for compatibility with other
-      // timestamps in db set by typeorm, and since second level precision is fine)
-      const nowish = new Date();
-      nowish.setMilliseconds(0);
-      user.firstLoginAt = nowish;
-      needUpdate = true;
-    }
     if (!user.picture && profile && profile.picture) {
       // Set the user's profile picture if our provider knows it.
       user.picture = profile.picture;
@@ -432,6 +424,25 @@ export class UsersManager {
       user.options = {...(user.options ?? {}), authSubject: userOptions.authSubject};
       needUpdate = true;
     }
+
+    // get date of now (remove milliseconds for compatibility with other
+    // timestamps in db set by typeorm, and since second level precision is fine)
+    const nowish = new Date();
+    nowish.setMilliseconds(0);
+    if (profile && !user.firstLoginAt) {
+      // set first login time to now
+      user.firstLoginAt = nowish;
+      needUpdate = true;
+    }
+    const getTimestampStartOfDay = (date: Date) => {
+      const timestamp = Math.floor(date.getTime() / 1000); // unix timestamp seconds from epoc
+      const startOfDay = timestamp - (timestamp % 86400 /*24h*/); // start of a day in seconds since epoc
+      return startOfDay;
+    };
+    if (!user.lastConnectionAt || getTimestampStartOfDay(user.lastConnectionAt) !== getTimestampStartOfDay(nowish)) {
+      user.lastConnectionAt = nowish;
+      needUpdate = true;
+    }
     if (needUpdate) {
       login.user = user;
       await manager.save([user, login]);
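
The net effect of the block above is that `last_connection_at` is rewritten at most once per UTC day: the stored value and the current time are reduced to start-of-day timestamps before comparing. A minimal standalone sketch of that check (illustrative only, not code from this commit; the helper name mirrors the one added above):

    // Reduce a Date to the start of its UTC day, in seconds since the epoch.
    function getTimestampStartOfDay(date: Date): number {
      const timestamp = Math.floor(date.getTime() / 1000);
      return timestamp - (timestamp % 86400);
    }

    // True when last_connection_at is unset or falls on an earlier UTC day than `now`.
    function needsConnectionUpdate(lastConnectionAt: Date | null, now: Date): boolean {
      return !lastConnectionAt ||
        getTimestampStartOfDay(lastConnectionAt) !== getTimestampStartOfDay(now);
    }

    needsConnectionUpdate(new Date('2024-04-15T10:00:00Z'), new Date('2024-04-15T10:05:00Z')); // false
    needsConnectionUpdate(new Date('2024-04-14T23:59:00Z'), new Date('2024-04-15T00:01:00Z')); // true

So a burst of page loads within the same day costs a single extra UPDATE on the users row.
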
app/gen-server/migration/1663851423064-UserUUID.ts:
@@ -1,5 +1,5 @@
-import {User} from 'app/gen-server/entity/User';
 import {makeId} from 'app/server/lib/idUtils';
+import {chunk} from 'lodash';
 import {MigrationInterface, QueryRunner, TableColumn} from "typeorm";
 
 export class UserUUID1663851423064 implements MigrationInterface {
@@ -16,11 +16,20 @@ export class UserUUID1663851423064 implements MigrationInterface {
     // Updating so many rows in a multiple queries is not ideal. We will send updates in chunks.
     // 300 seems to be a good number, for 24k rows we have 80 queries.
     const userList = await queryRunner.manager.createQueryBuilder()
-      .select("users")
-      .from(User, "users")
+      .select(["users.id", "users.ref"])
+      .from("users", "users")
       .getMany();
     userList.forEach(u => u.ref = makeId());
-    await queryRunner.manager.save(userList, { chunk: 300 });
+
+    const userChunks = chunk(userList, 300);
+    for (const users of userChunks) {
+      await queryRunner.connection.transaction(async manager => {
+        const queries = users.map((user: any, _index: number, _array: any[]) => {
+          return queryRunner.manager.update("users", user.id, user);
+        });
+        await Promise.all(queries);
+      });
+    }
 
     // We are not making this column unique yet, because it can fail
     // if there are some old workers still running, and any new user
app/gen-server/migration/1664528376930-UserRefUnique.ts:
@@ -1,5 +1,5 @@
-import {User} from 'app/gen-server/entity/User';
 import {makeId} from 'app/server/lib/idUtils';
+import {chunk} from 'lodash';
 import {MigrationInterface, QueryRunner} from "typeorm";
 
 export class UserRefUnique1664528376930 implements MigrationInterface {
@@ -9,12 +9,21 @@ export class UserRefUnique1664528376930 implements MigrationInterface {
 
     // Update users that don't have unique ref set.
     const userList = await queryRunner.manager.createQueryBuilder()
-      .select("users")
-      .from(User, "users")
-      .where("ref is null")
+      .select(["users.id", "users.ref"])
+      .from("users", "users")
+      .where("users.ref is null")
       .getMany();
     userList.forEach(u => u.ref = makeId());
-    await queryRunner.manager.save(userList, {chunk: 300});
+
+    const userChunks = chunk(userList, 300);
+    for (const users of userChunks) {
+      await queryRunner.connection.transaction(async manager => {
+        const queries = users.map((user: any, _index: number, _array: any[]) => {
+          return queryRunner.manager.update("users", user.id, user);
+        });
+        await Promise.all(queries);
+      });
+    }
 
     // Mark column as unique and non-nullable.
     const users = (await queryRunner.getTable('users'))!;
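
Both migrations above now query the `users` table by name instead of through the `User` entity, and replace TypeORM's `save(userList, {chunk: 300})` with explicit per-chunk transactions of targeted updates. A sketch of that pattern in isolation (an assumption for illustration: `runUpdate` stands in for `queryRunner.manager.update`, and rows carry an `id`):

    import {chunk} from 'lodash';

    // Send updates in fixed-size batches; with 300 per batch, 24k rows take 80 rounds.
    async function updateInChunks<T extends {id: number}>(
      rows: T[],
      runUpdate: (row: T) => Promise<unknown>,
      chunkSize = 300,
    ): Promise<void> {
      for (const batch of chunk(rows, chunkSize)) {
        // Each batch's updates run concurrently; batches run one after another.
        await Promise.all(batch.map(row => runUpdate(row)));
      }
    }

Wrapping each batch in its own transaction, as the migrations do, also bounds how much work is rolled back if one batch fails.
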
app/gen-server/migration/1713186031023-UserLastConnection.ts (new file, 18 lines):
@@ -0,0 +1,18 @@
+import {MigrationInterface, QueryRunner, TableColumn} from 'typeorm';
+
+export class UserLastConnection1713186031023 implements MigrationInterface {
+
+  public async up(queryRunner: QueryRunner): Promise<any> {
+    const sqlite = queryRunner.connection.driver.options.type === 'sqlite';
+    const datetime = sqlite ? "datetime" : "timestamp with time zone";
+    await queryRunner.addColumn('users', new TableColumn({
+      name: 'last_connection_at',
+      type: datetime,
+      isNullable: true
+    }));
+  }
+
+  public async down(queryRunner: QueryRunner): Promise<any> {
+    await queryRunner.dropColumn('users', 'last_connection_at');
+  }
+}
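
The column type is chosen per backend: `datetime` on SQLite and `timestamp with time zone` otherwise (Postgres). A hypothetical way to exercise the migration directly against an existing TypeORM connection (the `connection` parameter is an assumption, not something this diff provides):

    import {Connection} from 'typeorm';
    import {UserLastConnection1713186031023 as UserLastConnection}
      from 'app/gen-server/migration/1713186031023-UserLastConnection';

    async function tryUserLastConnection(connection: Connection): Promise<void> {
      const runner = connection.createQueryRunner();
      try {
        await new UserLastConnection().up(runner);    // adds users.last_connection_at
        await new UserLastConnection().down(runner);  // drops it again
      } finally {
        await runner.release();
      }
    }
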
@@ -21,8 +21,8 @@ export const TEST_HTTPS_OFFSET = process.env.GRIST_TEST_HTTPS_OFFSET ?
 
 // Database fields that we permit in entities but don't want to cross the api.
 const INTERNAL_FIELDS = new Set([
-  'apiKey', 'billingAccountId', 'firstLoginAt', 'filteredOut', 'ownerId', 'gracePeriodStart', 'stripeCustomerId',
-  'stripeSubscriptionId', 'stripeProductId', 'userId', 'isFirstTimeUser', 'allowGoogleLogin',
+  'apiKey', 'billingAccountId', 'firstLoginAt', 'lastConnectionAt', 'filteredOut', 'ownerId', 'gracePeriodStart',
+  'stripeCustomerId', 'stripeSubscriptionId', 'stripeProductId', 'userId', 'isFirstTimeUser', 'allowGoogleLogin',
   'authSubject', 'usage', 'createdBy'
 ]);
 
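
Adding `lastConnectionAt` to `INTERNAL_FIELDS` keeps the new column out of API responses. The pruning helper itself is not part of this diff; a sketch of how such a set is typically applied before an entity crosses the API:

    // Illustrative only: drop any property named in INTERNAL_FIELDS before serializing.
    function pruneInternalFields<T extends object>(entity: T): Partial<T> {
      return Object.fromEntries(
        Object.entries(entity).filter(([key]) => !INTERNAL_FIELDS.has(key))
      ) as Partial<T>;
    }
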
@@ -42,6 +42,8 @@ import {ActivationPrefs1682636695021 as ActivationPrefs} from 'app/gen-server/mi
 import {AssistantLimit1685343047786 as AssistantLimit} from 'app/gen-server/migration/1685343047786-AssistantLimit';
 import {Shares1701557445716 as Shares} from 'app/gen-server/migration/1701557445716-Shares';
 import {Billing1711557445716 as BillingFeatures} from 'app/gen-server/migration/1711557445716-Billing';
+import {UserLastConnection1713186031023
+  as UserLastConnection} from 'app/gen-server/migration/1713186031023-UserLastConnection';
 
 const home: HomeDBManager = new HomeDBManager();
 
@@ -50,7 +52,8 @@ const migrations = [Initial, Login, PinDocs, UserPicture, DisplayEmail, DisplayE
                     CustomerIndex, ExtraIndexes, OrgHost, DocRemovedAt, Prefs,
                     ExternalBilling, DocOptions, Secret, UserOptions, GracePeriodStart,
                     DocumentUsage, Activations, UserConnectId, UserUUID, UserUniqueRefUUID,
-                    Forks, ForkIndexes, ActivationPrefs, AssistantLimit, Shares, BillingFeatures];
+                    Forks, ForkIndexes, ActivationPrefs, AssistantLimit, Shares, BillingFeatures,
+                    UserLastConnection];
 
 // Assert that the "members" acl rule and group exist (or not).
 function assertMembersGroup(org: Organization, exists: boolean) {
@@ -113,6 +116,33 @@ describe('migrations', function() {
     // be doing something.
   });
 
+  it('can migrate UserUUID and UserUniqueRefUUID with user in table', async function() {
+    this.timeout(60000);
+    const runner = home.connection.createQueryRunner();
+
+    // Create 400 users to test the chunk (each chunk is 300 users)
+    const nbUsersToCreate = 400;
+    for (const migration of migrations) {
+      if (migration === UserUUID) {
+        for (let i = 0; i < nbUsersToCreate; i++) {
+          await runner.query(`INSERT INTO users (id, name, is_first_time_user) VALUES (${i}, 'name${i}', true)`);
+        }
+      }
+
+      await (new migration()).up(runner);
+    }
+
+    // Check that all refs are unique
+    const userList = await runner.manager.createQueryBuilder()
+      .select(["users.id", "users.ref"])
+      .from("users", "users")
+      .getMany();
+    const setOfUserRefs = new Set(userList.map(u => u.ref));
+    assert.equal(nbUsersToCreate, userList.length);
+    assert.equal(setOfUserRefs.size, userList.length);
+    await addSeedData(home.connection);
+  });
+
   it('can correctly switch display_email column to non-null with data', async function() {
     this.timeout(60000);
     const sqlite = home.connection.driver.options.type === 'sqlite';