Mirror of https://activitypub.software/TransFem-org/Sharkey.git, synced 2024-12-14 14:15:43 +01:00
refactor: temporary files (#8713)
* simplify temporary files for thumbnails

  Because only a single file will be written to the directory, creating a separate directory seems unnecessary. If only a temporary file is created, the code from `createTemp` can be reused here as well.

* refactor: deduplicate code for temporary files/directories

  To follow the DRY principle, the same code should not be duplicated across different files. Instead, an already existing function is used. Because temporary directories are also created in multiple locations, a function for this is newly added to reduce duplication.

* fix: clean up identicon temp files

  The temporary files for identicons are not reused and can be deleted after they are fully read. This condition is met when the stream is closed, so the file can be cleaned up using the stream's events API.

* fix: ensure cleanup is called when download fails

* fix: ensure cleanup is called in error conditions

  This covers the import/export queue jobs and mostly amounts to wrapping the code in a try...finally statement whose finally block runs the cleanup (sketched below).

* fix: use correct type instead of `any`
parent b049633db7
commit e27c6abaea
12 changed files with 307 additions and 341 deletions
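As a minimal sketch of the pattern applied throughout the files below (the `@/misc/create-temp.js` import and the `[path, cleanup]` tuple come from the diff itself; the surrounding `exportSomething` function and its `render` parameter are hypothetical):

import * as fs from 'node:fs';
import { createTemp } from '@/misc/create-temp.js';

// Hypothetical job body: acquire a temp file, use it, and always release it.
async function exportSomething(render: () => string): Promise<void> {
	const [path, cleanup] = await createTemp();
	try {
		// anything that may throw happens inside the try block
		await fs.promises.writeFile(path, render());
	} finally {
		// runs on success and failure alike, so the temp file never leaks
		cleanup();
	}
}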
@@ -1,10 +1,19 @@
 import * as tmp from 'tmp';
 
-export function createTemp(): Promise<[string, any]> {
-	return new Promise<[string, any]>((res, rej) => {
+export function createTemp(): Promise<[string, () => void]> {
+	return new Promise<[string, () => void]>((res, rej) => {
 		tmp.file((e, path, fd, cleanup) => {
 			if (e) return rej(e);
 			res([path, cleanup]);
 		});
 	});
 }
+
+export function createTempDir(): Promise<[string, () => void]> {
+	return new Promise<[string, () => void]>((res, rej) => {
+		tmp.dir((e, path, cleanup) => {
+			if (e) return rej(e);
+			res([path, cleanup]);
+		});
+	});
+}
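For illustration only, a hypothetical caller (not code from this commit) would consume the two helpers roughly like this:

import { createTemp, createTempDir } from '@/misc/create-temp.js';

async function example(): Promise<void> {
	const [file, cleanupFile] = await createTemp();   // path of an empty temporary file
	const [dir, cleanupDir] = await createTempDir();  // path of an empty temporary directory
	try {
		// write to `file`, unpack or generate things inside `dir`, ...
	} finally {
		cleanupFile();
		cleanupDir();
	}
}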
@@ -1,11 +1,11 @@
 import Bull from 'bull';
-import * as tmp from 'tmp';
 import * as fs from 'node:fs';
 
 import { queueLogger } from '../../logger.js';
 import { addFile } from '@/services/drive/add-file.js';
 import { format as dateFormat } from 'date-fns';
 import { getFullApAccount } from '@/misc/convert-host.js';
+import { createTemp } from '@/misc/create-temp.js';
 import { Users, Blockings } from '@/models/index.js';
 import { MoreThan } from 'typeorm';
 import { DbUserJobData } from '@/queue/types.js';
@@ -22,73 +22,72 @@ export async function exportBlocking(job: Bull.Job<DbUserJobData>, done: any): P
 	}
 
 	// Create temp file
-	const [path, cleanup] = await new Promise<[string, any]>((res, rej) => {
-		tmp.file((e, path, fd, cleanup) => {
-			if (e) return rej(e);
-			res([path, cleanup]);
-		});
-	});
+	const [path, cleanup] = await createTemp();
 
 	logger.info(`Temp file is ${path}`);
 
+	try {
 		const stream = fs.createWriteStream(path, { flags: 'a' });
 
 		let exportedCount = 0;
 		let cursor: any = null;
 
 		while (true) {
 			const blockings = await Blockings.find({
 				where: {
 					blockerId: user.id,
 					...(cursor ? { id: MoreThan(cursor) } : {}),
 				},
 				take: 100,
 				order: {
 					id: 1,
 				},
 			});
 
 			if (blockings.length === 0) {
 				job.progress(100);
 				break;
 			}
 
 			cursor = blockings[blockings.length - 1].id;
 
 			for (const block of blockings) {
 				const u = await Users.findOneBy({ id: block.blockeeId });
 				if (u == null) {
 					exportedCount++; continue;
 				}
 
 				const content = getFullApAccount(u.username, u.host);
 				await new Promise<void>((res, rej) => {
 					stream.write(content + '\n', err => {
 						if (err) {
 							logger.error(err);
 							rej(err);
 						} else {
 							res();
 						}
 					});
 				});
 				exportedCount++;
 			}
 
 			const total = await Blockings.countBy({
 				blockerId: user.id,
 			});
 
 			job.progress(exportedCount / total);
 		}
 
 		stream.end();
 		logger.succ(`Exported to: ${path}`);
 
 		const fileName = 'blocking-' + dateFormat(new Date(), 'yyyy-MM-dd-HH-mm-ss') + '.csv';
 		const driveFile = await addFile({ user, path, name: fileName, force: true });
 
 		logger.succ(`Exported to: ${driveFile.id}`);
+	} finally {
 		cleanup();
+	}
 
 	done();
 }
@@ -1,5 +1,4 @@
 import Bull from 'bull';
-import * as tmp from 'tmp';
 import * as fs from 'node:fs';
 
 import { ulid } from 'ulid';
@@ -10,6 +9,7 @@ import { addFile } from '@/services/drive/add-file.js';
 import { format as dateFormat } from 'date-fns';
 import { Users, Emojis } from '@/models/index.js';
 import { } from '@/queue/types.js';
+import { createTempDir } from '@/misc/create-temp.js';
 import { downloadUrl } from '@/misc/download-url.js';
 import config from '@/config/index.js';
 import { IsNull } from 'typeorm';
@@ -25,13 +25,7 @@ export async function exportCustomEmojis(job: Bull.Job, done: () => void): Promi
 		return;
 	}
 
-	// Create temp dir
-	const [path, cleanup] = await new Promise<[string, () => void]>((res, rej) => {
-		tmp.dir((e, path, cleanup) => {
-			if (e) return rej(e);
-			res([path, cleanup]);
-		});
-	});
+	const [path, cleanup] = await createTempDir();
 
 	logger.info(`Temp dir is ${path}`);
@@ -98,12 +92,7 @@ export async function exportCustomEmojis(job: Bull.Job, done: () => void): Promi
 	metaStream.end();
 
 	// Create archive
-	const [archivePath, archiveCleanup] = await new Promise<[string, () => void]>((res, rej) => {
-		tmp.file((e, path, fd, cleanup) => {
-			if (e) return rej(e);
-			res([path, cleanup]);
-		});
-	});
+	const [archivePath, archiveCleanup] = await createTemp();
 	const archiveStream = fs.createWriteStream(archivePath);
 	const archive = archiver('zip', {
 		zlib: { level: 0 },
@@ -1,11 +1,11 @@
 import Bull from 'bull';
-import * as tmp from 'tmp';
 import * as fs from 'node:fs';
 
 import { queueLogger } from '../../logger.js';
 import { addFile } from '@/services/drive/add-file.js';
 import { format as dateFormat } from 'date-fns';
 import { getFullApAccount } from '@/misc/convert-host.js';
+import { createTemp } from '@/misc/create-temp.js';
 import { Users, Followings, Mutings } from '@/models/index.js';
 import { In, MoreThan, Not } from 'typeorm';
 import { DbUserJobData } from '@/queue/types.js';
@@ -23,73 +23,72 @@ export async function exportFollowing(job: Bull.Job<DbUserJobData>, done: () =>
 	}
 
 	// Create temp file
-	const [path, cleanup] = await new Promise<[string, () => void]>((res, rej) => {
-		tmp.file((e, path, fd, cleanup) => {
-			if (e) return rej(e);
-			res([path, cleanup]);
-		});
-	});
+	const [path, cleanup] = await createTemp();
 
 	logger.info(`Temp file is ${path}`);
 
+	try {
 		const stream = fs.createWriteStream(path, { flags: 'a' });
 
 		let cursor: Following['id'] | null = null;
 
 		const mutings = job.data.excludeMuting ? await Mutings.findBy({
 			muterId: user.id,
 		}) : [];
 
 		while (true) {
 			const followings = await Followings.find({
 				where: {
 					followerId: user.id,
 					...(mutings.length > 0 ? { followeeId: Not(In(mutings.map(x => x.muteeId))) } : {}),
 					...(cursor ? { id: MoreThan(cursor) } : {}),
 				},
 				take: 100,
 				order: {
 					id: 1,
 				},
 			}) as Following[];
 
 			if (followings.length === 0) {
 				break;
 			}
 
 			cursor = followings[followings.length - 1].id;
 
 			for (const following of followings) {
 				const u = await Users.findOneBy({ id: following.followeeId });
 				if (u == null) {
 					continue;
 				}
 
 				if (job.data.excludeInactive && u.updatedAt && (Date.now() - u.updatedAt.getTime() > 1000 * 60 * 60 * 24 * 90)) {
 					continue;
 				}
 
 				const content = getFullApAccount(u.username, u.host);
 				await new Promise<void>((res, rej) => {
 					stream.write(content + '\n', err => {
 						if (err) {
 							logger.error(err);
 							rej(err);
 						} else {
 							res();
 						}
 					});
 				});
 			}
 		}
 
 		stream.end();
 		logger.succ(`Exported to: ${path}`);
 
 		const fileName = 'following-' + dateFormat(new Date(), 'yyyy-MM-dd-HH-mm-ss') + '.csv';
 		const driveFile = await addFile({ user, path, name: fileName, force: true });
 
 		logger.succ(`Exported to: ${driveFile.id}`);
+	} finally {
 		cleanup();
+	}
 
 	done();
 }
@@ -1,11 +1,11 @@
 import Bull from 'bull';
-import * as tmp from 'tmp';
 import * as fs from 'node:fs';
 
 import { queueLogger } from '../../logger.js';
 import { addFile } from '@/services/drive/add-file.js';
 import { format as dateFormat } from 'date-fns';
 import { getFullApAccount } from '@/misc/convert-host.js';
+import { createTemp } from '@/misc/create-temp.js';
 import { Users, Mutings } from '@/models/index.js';
 import { IsNull, MoreThan } from 'typeorm';
 import { DbUserJobData } from '@/queue/types.js';
@@ -22,74 +22,73 @@ export async function exportMute(job: Bull.Job<DbUserJobData>, done: any): Promi
 	}
 
 	// Create temp file
-	const [path, cleanup] = await new Promise<[string, any]>((res, rej) => {
-		tmp.file((e, path, fd, cleanup) => {
-			if (e) return rej(e);
-			res([path, cleanup]);
-		});
-	});
+	const [path, cleanup] = await createTemp();
 
 	logger.info(`Temp file is ${path}`);
 
+	try {
 		const stream = fs.createWriteStream(path, { flags: 'a' });
 
 		let exportedCount = 0;
 		let cursor: any = null;
 
 		while (true) {
 			const mutes = await Mutings.find({
 				where: {
 					muterId: user.id,
 					expiresAt: IsNull(),
 					...(cursor ? { id: MoreThan(cursor) } : {}),
 				},
 				take: 100,
 				order: {
 					id: 1,
 				},
 			});
 
 			if (mutes.length === 0) {
 				job.progress(100);
 				break;
 			}
 
 			cursor = mutes[mutes.length - 1].id;
 
 			for (const mute of mutes) {
 				const u = await Users.findOneBy({ id: mute.muteeId });
 				if (u == null) {
 					exportedCount++; continue;
 				}
 
 				const content = getFullApAccount(u.username, u.host);
 				await new Promise<void>((res, rej) => {
 					stream.write(content + '\n', err => {
 						if (err) {
 							logger.error(err);
 							rej(err);
 						} else {
 							res();
 						}
 					});
 				});
 				exportedCount++;
 			}
 
 			const total = await Mutings.countBy({
 				muterId: user.id,
 			});
 
 			job.progress(exportedCount / total);
 		}
 
 		stream.end();
 		logger.succ(`Exported to: ${path}`);
 
 		const fileName = 'mute-' + dateFormat(new Date(), 'yyyy-MM-dd-HH-mm-ss') + '.csv';
 		const driveFile = await addFile({ user, path, name: fileName, force: true });
 
 		logger.succ(`Exported to: ${driveFile.id}`);
+	} finally {
 		cleanup();
+	}
 
 	done();
 }
@@ -1,5 +1,4 @@
 import Bull from 'bull';
-import * as tmp from 'tmp';
 import * as fs from 'node:fs';
 
 import { queueLogger } from '../../logger.js';
@@ -10,6 +9,7 @@ import { MoreThan } from 'typeorm';
 import { Note } from '@/models/entities/note.js';
 import { Poll } from '@/models/entities/poll.js';
 import { DbUserJobData } from '@/queue/types.js';
+import { createTemp } from '@/misc/create-temp.js';
 
 const logger = queueLogger.createSubLogger('export-notes');
@@ -23,82 +23,81 @@ export async function exportNotes(job: Bull.Job<DbUserJobData>, done: any): Prom
 	}
 
 	// Create temp file
-	const [path, cleanup] = await new Promise<[string, any]>((res, rej) => {
-		tmp.file((e, path, fd, cleanup) => {
-			if (e) return rej(e);
-			res([path, cleanup]);
-		});
-	});
+	const [path, cleanup] = await createTemp();
 
 	logger.info(`Temp file is ${path}`);
 
+	try {
 		const stream = fs.createWriteStream(path, { flags: 'a' });
 
 		const write = (text: string): Promise<void> => {
 			return new Promise<void>((res, rej) => {
 				stream.write(text, err => {
 					if (err) {
 						logger.error(err);
 						rej(err);
 					} else {
 						res();
 					}
 				});
 			});
 		};
 
 		await write('[');
 
 		let exportedNotesCount = 0;
 		let cursor: Note['id'] | null = null;
 
 		while (true) {
 			const notes = await Notes.find({
 				where: {
 					userId: user.id,
 					...(cursor ? { id: MoreThan(cursor) } : {}),
 				},
 				take: 100,
 				order: {
 					id: 1,
 				},
 			}) as Note[];
 
 			if (notes.length === 0) {
 				job.progress(100);
 				break;
 			}
 
 			cursor = notes[notes.length - 1].id;
 
 			for (const note of notes) {
 				let poll: Poll | undefined;
 				if (note.hasPoll) {
 					poll = await Polls.findOneByOrFail({ noteId: note.id });
 				}
 				const content = JSON.stringify(serialize(note, poll));
 				const isFirst = exportedNotesCount === 0;
 				await write(isFirst ? content : ',\n' + content);
 				exportedNotesCount++;
 			}
 
 			const total = await Notes.countBy({
 				userId: user.id,
 			});
 
 			job.progress(exportedNotesCount / total);
 		}
 
 		await write(']');
 
 		stream.end();
 		logger.succ(`Exported to: ${path}`);
 
 		const fileName = 'notes-' + dateFormat(new Date(), 'yyyy-MM-dd-HH-mm-ss') + '.json';
 		const driveFile = await addFile({ user, path, name: fileName, force: true });
 
 		logger.succ(`Exported to: ${driveFile.id}`);
+	} finally {
 		cleanup();
+	}
 
 	done();
 }
@@ -1,11 +1,11 @@
 import Bull from 'bull';
-import * as tmp from 'tmp';
 import * as fs from 'node:fs';
 
 import { queueLogger } from '../../logger.js';
 import { addFile } from '@/services/drive/add-file.js';
 import { format as dateFormat } from 'date-fns';
 import { getFullApAccount } from '@/misc/convert-host.js';
+import { createTemp } from '@/misc/create-temp.js';
 import { Users, UserLists, UserListJoinings } from '@/models/index.js';
 import { In } from 'typeorm';
 import { DbUserJobData } from '@/queue/types.js';
@@ -26,46 +26,45 @@ export async function exportUserLists(job: Bull.Job<DbUserJobData>, done: any):
 	});
 
 	// Create temp file
-	const [path, cleanup] = await new Promise<[string, any]>((res, rej) => {
-		tmp.file((e, path, fd, cleanup) => {
-			if (e) return rej(e);
-			res([path, cleanup]);
-		});
-	});
+	const [path, cleanup] = await createTemp();
 
 	logger.info(`Temp file is ${path}`);
 
+	try {
 		const stream = fs.createWriteStream(path, { flags: 'a' });
 
 		for (const list of lists) {
 			const joinings = await UserListJoinings.findBy({ userListId: list.id });
 			const users = await Users.findBy({
 				id: In(joinings.map(j => j.userId)),
 			});
 
 			for (const u of users) {
 				const acct = getFullApAccount(u.username, u.host);
 				const content = `${list.name},${acct}`;
 				await new Promise<void>((res, rej) => {
 					stream.write(content + '\n', err => {
 						if (err) {
 							logger.error(err);
 							rej(err);
 						} else {
 							res();
 						}
 					});
 				});
 			}
 		}
 
 		stream.end();
 		logger.succ(`Exported to: ${path}`);
 
 		const fileName = 'user-lists-' + dateFormat(new Date(), 'yyyy-MM-dd-HH-mm-ss') + '.csv';
 		const driveFile = await addFile({ user, path, name: fileName, force: true });
 
 		logger.succ(`Exported to: ${driveFile.id}`);
+	} finally {
 		cleanup();
+	}
 
 	done();
 }
@@ -1,9 +1,9 @@
 import Bull from 'bull';
-import * as tmp from 'tmp';
 import * as fs from 'node:fs';
 import unzipper from 'unzipper';
 
 import { queueLogger } from '../../logger.js';
+import { createTempDir } from '@/misc/create-temp.js';
 import { downloadUrl } from '@/misc/download-url.js';
 import { DriveFiles, Emojis } from '@/models/index.js';
 import { DbUserImportJobData } from '@/queue/types.js';
|
@ -25,13 +25,7 @@ export async function importCustomEmojis(job: Bull.Job<DbUserImportJobData>, don
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
// Create temp dir
|
const [path, cleanup] = await createTempDir();
|
||||||
const [path, cleanup] = await new Promise<[string, () => void]>((res, rej) => {
|
|
||||||
tmp.dir((e, path, cleanup) => {
|
|
||||||
if (e) return rej(e);
|
|
||||||
res([path, cleanup]);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
logger.info(`Temp dir is ${path}`);
|
logger.info(`Temp dir is ${path}`);
|
||||||
|
|
||||||
|
|
|
@@ -4,11 +4,11 @@ import { dirname } from 'node:path';
 import Koa from 'koa';
 import send from 'koa-send';
 import rename from 'rename';
-import * as tmp from 'tmp';
 import { serverLogger } from '../index.js';
 import { contentDisposition } from '@/misc/content-disposition.js';
 import { DriveFiles } from '@/models/index.js';
 import { InternalStorage } from '@/services/drive/internal-storage.js';
+import { createTemp } from '@/misc/create-temp.js';
 import { downloadUrl } from '@/misc/download-url.js';
 import { detectType } from '@/misc/get-file-info.js';
 import { convertToWebp, convertToJpeg, convertToPng } from '@/services/drive/image-processor.js';
@@ -50,12 +50,7 @@ export default async function(ctx: Koa.Context) {
 
 	if (!file.storedInternal) {
 		if (file.isLink && file.uri) { // expired remote file
-			const [path, cleanup] = await new Promise<[string, any]>((res, rej) => {
-				tmp.file((e, path, fd, cleanup) => {
-					if (e) return rej(e);
-					res([path, cleanup]);
-				});
-			});
+			const [path, cleanup] = await createTemp();
 
 			try {
 				await downloadUrl(file.uri, path);
@@ -89,10 +89,10 @@ router.get('/avatar/@:acct', async ctx => {
 });
 
 router.get('/identicon/:x', async ctx => {
-	const [temp] = await createTemp();
+	const [temp, cleanup] = await createTemp();
 	await genIdenticon(ctx.params.x, fs.createWriteStream(temp));
 	ctx.set('Content-Type', 'image/png');
-	ctx.body = fs.createReadStream(temp);
+	ctx.body = fs.createReadStream(temp).on('close', () => cleanup());
 });
 
 router.get('/verify-email/:code', async ctx => {
@@ -1,38 +1,31 @@
 import * as fs from 'node:fs';
-import * as tmp from 'tmp';
+import * as path from 'node:path';
+import { createTemp } from '@/misc/create-temp.js';
 import { IImage, convertToJpeg } from './image-processor.js';
 import FFmpeg from 'fluent-ffmpeg';
 
-export async function GenerateVideoThumbnail(path: string): Promise<IImage> {
-	const [outDir, cleanup] = await new Promise<[string, any]>((res, rej) => {
-		tmp.dir((e, path, cleanup) => {
-			if (e) return rej(e);
-			res([path, cleanup]);
-		});
-	});
-
-	await new Promise((res, rej) => {
-		FFmpeg({
-			source: path,
-		})
-		.on('end', res)
-		.on('error', rej)
-		.screenshot({
-			folder: outDir,
-			filename: 'output.png',
-			count: 1,
-			timestamps: ['5%'],
-		});
-	});
-
-	const outPath = `${outDir}/output.png`;
-
-	// Convert to JPEG (WebP would also work, but Mastodon does not support WebP and could not display it)
-	const thumbnail = await convertToJpeg(outPath, 498, 280);
-
-	// cleanup
-	await fs.promises.unlink(outPath);
-	cleanup();
-
-	return thumbnail;
+export async function GenerateVideoThumbnail(source: string): Promise<IImage> {
+	const [file, cleanup] = await createTemp();
+	const parsed = path.parse(file);
+
+	try {
+		await new Promise((res, rej) => {
+			FFmpeg({
+				source,
+			})
+			.on('end', res)
+			.on('error', rej)
+			.screenshot({
+				folder: parsed.dir,
+				filename: parsed.base,
+				count: 1,
+				timestamps: ['5%'],
+			});
+		});
+
+		// Convert to JPEG (WebP would also work, but Mastodon does not support WebP and could not display it)
+		return await convertToJpeg(498, 280);
+	} finally {
+		cleanup();
+	}
 }
@@ -45,29 +45,20 @@ export async function uploadFromUrl({
 	// Create temp file
 	const [path, cleanup] = await createTemp();
 
-	// write content at URL to temp file
-	await downloadUrl(url, path);
-
-	let driveFile: DriveFile;
-	let error;
-
 	try {
-		driveFile = await addFile({ user, path, name, comment, folderId, force, isLink, url, uri, sensitive });
+		// write content at URL to temp file
+		await downloadUrl(url, path);
+
+		const driveFile = await addFile({ user, path, name, comment, folderId, force, isLink, url, uri, sensitive });
 		logger.succ(`Got: ${driveFile.id}`);
+		return driveFile!;
 	} catch (e) {
-		error = e;
 		logger.error(`Failed to create drive file: ${e}`, {
 			url: url,
 			e: e,
 		});
-	}
-
-	// clean-up
-	cleanup();
-
-	if (error) {
-		throw error;
-	} else {
-		return driveFile!;
-	}
+		throw e;
+	} finally {
+		cleanup();
+	}
 }