Merge branch 'import-with-update-n-key-self-thread' into 'develop'

Import all Misskey self-note threads and allow importing multiple times to restore missing parts

See merge request firefish/firefish!10548
Kainoa Kanter 2023-07-31 23:24:42 +00:00
commit 4486cc7151
7 changed files with 157 additions and 35 deletions

View File

@@ -1,16 +1,18 @@
export type Post = {
text: string | null;
text: string | undefined;
cw: string | null;
localOnly: boolean;
createdAt: Date;
visibility: string;
};
export function parse(acct: any): Post {
return {
text: acct.text,
text: acct.text || undefined,
cw: acct.cw,
localOnly: acct.localOnly,
createdAt: new Date(acct.createdAt),
visibility: "hidden" + (acct.visibility || ""),
};
}
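The "hidden" prefix added to the visibility above pairs with the note-creation change in the last file of this merge request: the prefix suppresses initial federation and is later sliced off to restore the real visibility. A minimal sketch of that round trip, assuming only the names and values visible in this diff:

  // Sketch only: "followers" stands in for whatever visibility the export contains.
  const exported = { visibility: "followers" };
  const parsedVisibility = "hidden" + (exported.visibility || ""); // "hiddenfollowers"
  const dontFederateInitially = parsedVisibility.startsWith("hidden"); // true
  const restoredVisibility = parsedVisibility.slice("hidden".length); // back to "followers"

Prefixing the exported visibility, instead of hard-coding "hidden" and later forcing "public" as the old code did, is what lets a restored note keep its original scope.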

View File

@@ -28,6 +28,7 @@ import {
backgroundQueue,
} from "./queues.js";
import type { ThinUser } from "./types.js";
import { Note } from "@/models/entities/note.js";
function renderError(e: Error): any {
return {
@@ -358,6 +359,7 @@ export function createImportCkPostJob(
user: ThinUser,
post: any,
signatureCheck: boolean,
parent: Note | null = null,
) {
return dbQueue.add(
"importCkPost",
@@ -365,6 +367,7 @@ export function createImportCkPostJob(
user: user,
post: post,
signatureCheck: signatureCheck,
parent: parent,
},
{
removeOnComplete: true,

View File

@@ -3,7 +3,13 @@ import create from "@/services/note/create.js";
import { Users } from "@/models/index.js";
import type { DbUserImportMastoPostJobData } from "@/queue/types.js";
import { queueLogger } from "../../logger.js";
import { uploadFromUrl } from "@/services/drive/upload-from-url.js";
import type { DriveFile } from "@/models/entities/drive-file.js";
import type Bull from "bull";
import { createImportCkPostJob } from "@/queue/index.js";
import { Notes, NoteEdits } from "@/models/index.js";
import type { Note } from "@/models/entities/note.js";
import { genId } from "@/misc/gen-id.js";
const logger = queueLogger.createSubLogger("import-firefish-post");
@@ -17,6 +23,7 @@ export async function importCkPost(
return;
}
const post = job.data.post;
/*
if (post.replyId != null) {
done();
return;
@@ -29,23 +36,74 @@ export async function importCkPost(
done();
return;
}
const { text, cw, localOnly, createdAt } = Post.parse(post);
const note = await create(user, {
*/
const urls = (post.files || [])
.map((x: any) => x.url)
.filter((x: string) => x.startsWith("http"));
const files: DriveFile[] = [];
for (const url of urls) {
try {
const file = await uploadFromUrl({
url: url,
user: user,
});
files.push(file);
} catch (e) {
logger.error(`Skipped adding file to drive: ${url}`);
}
}
const { text, cw, localOnly, createdAt, visibility } = Post.parse(post);
let note = await Notes.findOneBy({
createdAt: createdAt,
files: undefined,
poll: undefined,
text: text || undefined,
reply: null,
renote: null,
cw: cw,
localOnly,
visibility: "hidden",
visibleUsers: [],
channel: null,
apMentions: new Array(0),
apHashtags: undefined,
apEmojis: undefined,
text: text,
userId: user.id,
});
if (note && (note?.fileIds?.length || 0) < files.length) {
const update: Partial<Note> = {};
update.fileIds = files.map((x) => x.id);
await Notes.update(note.id, update);
await NoteEdits.insert({
id: genId(),
noteId: note.id,
text: note.text || undefined,
cw: note.cw,
fileIds: note.fileIds,
updatedAt: new Date(),
});
logger.info(`Note file updated`);
}
if (!note) {
note = await create(user, {
createdAt: createdAt,
files: files.length == 0 ? undefined : files,
poll: undefined,
text: text || undefined,
reply: post.replyId ? job.data.parent : null,
renote: post.renoteId ? job.data.parent : null,
cw: cw,
localOnly,
visibility: visibility,
visibleUsers: [],
channel: null,
apMentions: new Array(0),
apHashtags: undefined,
apEmojis: undefined,
});
logger.info(`Created new note`);
} else {
logger.info(`Note already exists`);
}
logger.succ("Imported");
if (post.childNotes) {
for (const child of post.childNotes) {
createImportCkPostJob(
job.data.user,
child,
job.data.signatureCheck,
note,
);
}
}
done();
}
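The Notes.findOneBy lookup above is what makes re-running an import safe: a note that was already created is matched by its creation time, text, and user, and only missing attachments are filled in (recorded as a NoteEdits entry), while anything not found is created from scratch. A rough sketch of that decision flow, not the literal code, with a hypothetical existing/uploaded pair:

  // Hedged sketch of the idempotent re-import decision, not the code above verbatim.
  type ExistingNote = { id: string; fileIds: string[] } | null;
  function decide(existing: ExistingNote, uploaded: { id: string }[]) {
    if (existing && existing.fileIds.length < uploaded.length) {
      // An earlier run created the note but some files never made it to the drive:
      // attach the freshly uploaded files and record the change as a NoteEdits entry.
      return { action: "update-files", fileIds: uploaded.map((f) => f.id) };
    }
    if (!existing) return { action: "create" }; // first import of this note
    return { action: "skip" }; // nothing is missing, leave the note untouched
  }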

View File

@@ -8,6 +8,9 @@ import { resolveNote } from "@/remote/activitypub/models/note.js";
import { Note } from "@/models/entities/note.js";
import { uploadFromUrl } from "@/services/drive/upload-from-url.js";
import type { DriveFile } from "@/models/entities/drive-file.js";
import { Notes, NoteEdits } from "@/models/index.js";
import type { Note } from "@/models/entities/note.js";
import { genId } from "@/misc/gen-id.js";
const logger = queueLogger.createSubLogger("import-masto-post");
@@ -67,23 +70,47 @@ export async function importMastoPost(
}
}
}
const note = await create(user, {
let note = await Notes.findOneBy({
createdAt: new Date(post.object.published),
files: files.length == 0 ? undefined : files,
poll: undefined,
text: text || undefined,
reply,
renote: null,
cw: post.object.sensitive ? post.object.summary : undefined,
localOnly: false,
visibility: "hidden",
visibleUsers: [],
channel: null,
apMentions: new Array(0),
apHashtags: undefined,
apEmojis: undefined,
text: text,
userId: user.id,
});
if (note && (note?.fileIds?.length || 0) < files.length) {
const update: Partial<Note> = {};
update.fileIds = files.map((x) => x.id);
await Notes.update(note.id, update);
await NoteEdits.insert({
id: genId(),
noteId: note.id,
text: note.text || undefined,
cw: note.cw,
fileIds: note.fileIds,
updatedAt: new Date(),
});
logger.info(`Note file updated`);
}
if (!note) {
note = await create(user, {
createdAt: new Date(post.object.published),
files: files.length == 0 ? undefined : files,
poll: undefined,
text: text || undefined,
reply,
renote: null,
cw: post.object.sensitive ? post.object.summary : undefined,
localOnly: false,
visibility: "hiddenpublic",
visibleUsers: [],
channel: null,
apMentions: new Array(0),
apHashtags: undefined,
apEmojis: undefined,
});
logger.info(`Created new note`);
} else {
logger.info(`Note already exists`);
}
job.progress(100);
done();

View File

@@ -57,7 +57,8 @@ export async function importPosts(
const parsed = JSON.parse(json);
if (parsed instanceof Array) {
logger.info("Parsing key style posts");
for (const post of JSON.parse(json)) {
const arr = recreateChain(parsed);
for (const post of arr) {
createImportCkPostJob(job.data.user, post, job.data.signatureCheck);
}
} else if (parsed instanceof Object) {
@@ -74,3 +75,32 @@ export async function importPosts(
logger.succ("Imported");
done();
}
function recreateChain(arr: any[]): any {
type NotesMap = {
[id: string]: any;
};
const notesTree: any[] = [];
const lookup: NotesMap = {};
for (const note of arr) {
lookup[`${note.id}`] = note;
note.childNotes = [];
if (note.replyId == null && note.renoteId == null) {
notesTree.push(note);
}
}
for (const note of arr) {
let parent = null;
if (note.replyId != null) {
parent = lookup[`${note.replyId}`];
}
if (note.renoteId != null) {
parent = lookup[`${note.renoteId}`];
}
if (parent) {
parent.childNotes.push(note);
}
}
return notesTree;
}
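A hedged usage sketch of recreateChain with made-up export data (the ids and text below are purely illustrative), showing how the flat export becomes a tree of root notes whose childNotes are then queued recursively with the created note as parent:

  // Purely illustrative ids and text; not taken from any real export.
  const exported = [
    { id: "a", text: "root post", replyId: null, renoteId: null },
    { id: "b", text: "self reply", replyId: "a", renoteId: null },
    { id: "c", text: "reply to the reply", replyId: "b", renoteId: null },
  ];
  const roots = recreateChain(exported);
  // roots => [ note "a" with childNotes [ note "b" with childNotes [ note "c" ] ] ]
  // Each root goes to createImportCkPostJob; once its note exists, the worker
  // re-queues every childNotes entry with that note as `parent`, so the whole
  // self-thread is rebuilt top-down across repeated queue jobs.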

View File

@@ -52,6 +52,7 @@ export type DbUserImportMastoPostJobData = {
user: ThinUser;
post: any;
signatureCheck: boolean;
parent: Note | null;
};
export type ObjectStorageJobData =

View File

@@ -172,7 +172,7 @@ export default async (
// rome-ignore lint/suspicious/noAsyncPromiseExecutor: FIXME
new Promise<Note>(async (res, rej) => {
const dontFederateInitially =
data.localOnly || data.visibility === "hidden";
data.localOnly || data.visibility?.startsWith("hidden");
// If you reply outside the channel, match the scope of the target.
// TODO (I think it's a process that could be done on the client side, but it's server side for now.)
@@ -206,7 +206,8 @@ export default async (
if (data.channel != null) data.visibility = "public";
if (data.channel != null) data.visibleUsers = [];
if (data.channel != null) data.localOnly = true;
if (data.visibility === "hidden") data.visibility = "public";
if (data.visibility.startsWith("hidden"))
data.visibility = data.visibility.slice(6);
// enforce silent clients on server
if (