const { Errors } = require('../../shared')
const { NodeDescriptorType } = require('../../enum')
const fs = require('fs')
const tmp = require('tmp-promise')
const CombinedStream = require('combined-stream')
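
/*
 * Flush the temporary write chunks recorded for a session file descriptor:
 * the chunks are combined in byte order, handed to the upload provider, and
 * the corresponding fs:Node record is updated before a response is sent.
 * The descriptor in message.data() is assumed to have been allocated by the
 * matching open handler and stored on the socket session.
 */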
module.exports = exports = async (message, di) => {
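    // Resolve the fs:Node model from the DI container and read the target descriptor.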
    const Node = di.models.get('fs:Node')
    const { descriptor } = message.data()

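    // The socket session maps open file descriptors to node UUIDs.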
    const node_uuid = message.socket.session.file_descriptors?.[descriptor]
    if ( !node_uuid ) {
        return message.send_response(
            message.fresh().error(Errors.NodeDoesNotExist)
        )
    }

    // Check if we have any temporary files which have been written to.
    // If so, we should "flush" the temporary files to the proper storage.
    const placeholders = message.socket.session.temp_write_files?.[descriptor]
    if ( !placeholders ) {
        return message.send_response(
            message.fresh()
        )
    }

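    // Look up the backing node record for this descriptor.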
    const node = await Node.findOne({
        uuid: node_uuid,
        deleted: false,
        descriptor_type: NodeDescriptorType.File,
    })

    if ( !node ) {
        return message.send_response(
            message.fresh().error(Errors.NodeDoesNotExist)
        )
    }

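    // If the node already has an uploaded file from a previous write, it is replaced below.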
    const existing_file = await node.uploaded_file()
    if ( existing_file ) {
        // This is a new write, so delete the old file
        await existing_file.delete()
        delete node.uploaded_file_id
    }

    // Sort the placeholders by the byte starting position
    placeholders.sort((a, b) => {
        if ( a.position < b.position ) return -1
        if ( a.position > b.position ) return 1
        return 0
    })

    // Combine them into a single stream
    const stream = CombinedStream.create()
    const combined_file = await tmp.file()

    for ( const placeholder of placeholders ) {
        stream.append(fs.createReadStream(placeholder.path))
    }

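    // Write the combined stream out to the temporary file and wait for it to finish.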
    const pipe = stream.pipe(fs.createWriteStream(combined_file.path))

    await new Promise(res => pipe.on('finish', res))

    // Store the temporary file
    const new_file = await di.upload.provider().store({
        temp_path: combined_file.path,
        original_name: node.pied_name,
        mime_type: 'application/octet-stream', // TODO determine from file extension?
    })

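    // Record the combined size, then remove the local temporary file.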
    const stat = await fs.promises.stat(combined_file.path)
    await fs.promises.unlink(combined_file.path)

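    // Point the node at the newly-stored file and persist the updated metadata.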
    node.uploaded_file_id = new_file.id
    node.size = stat.size
    await node.save()

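    // Acknowledge the successful flush.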
    message.send_response(
        message.fresh()
    )
}