Implement follower and following searches
commit 0d41422423
parent b6638a7862
@@ -4,8 +4,8 @@ import {dbLogger} from "./logger.js";
 import config from "@/config/index.js";
 import {Note} from "@/models/entities/note.js";
 import * as url from "url";
-import {User} from "@/models/entities/user.js";
-import {Users} from "@/models/index.js";
+import {ILocalUser, User} from "@/models/entities/user.js";
+import {Followings, Users} from "@/models/index.js";
 
 const logger = dbLogger.createSubLogger("meilisearch", "gray", false);
 
@@ -41,6 +41,7 @@ posts
 "userHost",
 "mediaAttachment",
 "createdAt",
+"userId",
 ])
 .catch((e) =>
 logger.error(
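A minimal sketch of the context this fragment sits in (an assumption, not code from the commit: the surrounding call is presumably the posts index's updateFilterableAttributes(), and the client/index setup is hypothetical). Making "userId" filterable is what lets the `userId IN [...]` filters used by filter:following and filter:followers further down be accepted by Meilisearch:

import { MeiliSearch } from "meilisearch";

// Hypothetical connection details; the real ones come from config elsewhere.
const client = new MeiliSearch({ host: "http://localhost:7700" });
const posts = client.index("posts");

posts
	.updateFilterableAttributes([
		// ...existing attributes such as "userHost", "mediaAttachment", "createdAt",
		"userId", // required before `userId IN [...]` filters are accepted
	])
	.catch((e) => console.error(`Setting filterable attr failed: ${e}`));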
@@ -48,6 +49,14 @@ posts
 ),
 );
 
+posts
+.updateSortableAttributes(["createdAt"])
+.catch((e) =>
+logger.error(
+`Setting sortable attr failed, placeholder searches won't sort properly: ${e}`,
+),
+);
+
 logger.info("Connected to MeiliSearch");
 
 export type MeilisearchNote = {
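The new catch message refers to Meilisearch placeholder searches: a search with an empty query string that only applies filters, sorting, and pagination. Making createdAt sortable is what allows such searches to return newest posts first. A minimal sketch with a hypothetical filter, reusing the posts index from above:

// Placeholder search: empty query, filters/sort only.
// Needs "createdAt" in the index's sortable attributes.
const page = await posts.search("", {
	filter: ['userHost = "example.com"'], // hypothetical filter
	sort: ["createdAt:desc"],
	limit: 10,
	offset: 0,
});
console.log(`${page.hits.length} hits`);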
@@ -63,60 +72,130 @@ export type MeilisearchNote = {
 
 export default hasConfig
 ? {
-search: (query: string, limit: number, offset: number) => {
+search: async (
+query: string,
+limit: number,
+offset: number,
+userCtx: ILocalUser | null,
+) => {
 /// Advanced search syntax
 /// from:user => filter by user + optional domain
 /// has:image/video/audio/text/file => filter by attachment types
 /// domain:domain.com => filter by domain
 /// before:Date => show posts made before Date
 /// after: Date => show posts made after Date
+/// "text" => get posts with exact text between quotes
+/// filter:following => show results only from users you follow
+/// filter:followers => show results only from followers
 
 let constructedFilters: string[] = [];
 
 let splitSearch = query.split(" ");
 
 // Detect search operators and remove them from the actual query
-splitSearch = splitSearch.filter((term) => {
-if (term.startsWith("has:")) {
-let fileType = term.slice(4);
-constructedFilters.push(`mediaAttachment = "${fileType}"`);
-return false;
-} else if (term.startsWith("from:")) {
-let user = term.slice(5);
-constructedFilters.push(`userName = ${user}`);
-return false;
-} else if (term.startsWith("domain:")) {
-let domain = term.slice(7);
-constructedFilters.push(`userHost = ${domain}`);
-return false;
-} else if (term.startsWith("after:")) {
-let timestamp = term.slice(6);
-// Try to parse the timestamp as JavaScript Date
-let date = Date.parse(timestamp);
-if (isNaN(date)) return false;
-constructedFilters.push(`createdAt > ${date}`);
-return false;
-} else if (term.startsWith("before:")) {
-let timestamp = term.slice(7);
-// Try to parse the timestamp as JavaScript Date
-let date = Date.parse(timestamp);
-if (isNaN(date)) return false;
-constructedFilters.push(`createdAt < ${date}`);
-return false;
-}
+let filteredSearchTerms = (
+await Promise.all(
+splitSearch.map(async (term) => {
+if (term.startsWith("has:")) {
+let fileType = term.slice(4);
+constructedFilters.push(`mediaAttachment = "${fileType}"`);
+return null;
+} else if (term.startsWith("from:")) {
+let user = term.slice(5);
+constructedFilters.push(`userName = ${user}`);
+return null;
+} else if (term.startsWith("domain:")) {
+let domain = term.slice(7);
+constructedFilters.push(`userHost = ${domain}`);
+return null;
+} else if (term.startsWith("after:")) {
+let timestamp = term.slice(6);
+// Try to parse the timestamp as JavaScript Date
+let date = Date.parse(timestamp);
+if (isNaN(date)) return null;
+constructedFilters.push(`createdAt > ${date / 1000}`);
+return null;
+} else if (term.startsWith("before:")) {
+let timestamp = term.slice(7);
+// Try to parse the timestamp as JavaScript Date
+let date = Date.parse(timestamp);
+if (isNaN(date)) return null;
+constructedFilters.push(`createdAt < ${date / 1000}`);
+return null;
+} else if (term.startsWith("filter:following")) {
+// Check if we got a context user
+if (userCtx) {
+// Fetch user follows from DB
+let followedUsers = await Followings.find({
+where: {
+followerId: userCtx.id,
+},
+select: {
+followeeId: true,
+},
+});
+let followIDs = followedUsers.map((user) => user.followeeId);
 
-return true;
-});
+if (followIDs.length === 0) return null;
 
-logger.info(`Searching for ${splitSearch.join(" ")}`);
+constructedFilters.push(`userId IN [${followIDs.join(",")}]`);
+} else {
+logger.warn(
+"search filtered to follows called without user context",
+);
+}
+
+return null;
+} else if (term.startsWith("filter:followers")) {
+// Check if we got a context user
+if (userCtx) {
+// Fetch users follows from DB
+let followedUsers = await Followings.find({
+where: {
+followeeId: userCtx.id,
+},
+select: {
+followerId: true,
+},
+});
+let followIDs = followedUsers.map((user) => user.followerId);
+
+if (followIDs.length === 0) return null;
+
+constructedFilters.push(`userId IN [${followIDs.join(",")}]`);
+} else {
+logger.warn(
+"search filtered to followers called without user context",
+);
+}
+
+return null;
+}
+
+return term;
+}),
+)
+).filter((term) => term !== null);
+
+let sortRules = [];
+
+// An empty search term with defined filters means we have a placeholder search => https://www.meilisearch.com/docs/reference/api/search#placeholder-search
+// These have to be ordered manually, otherwise the *oldest* posts are returned first, which we don't want
+if (filteredSearchTerms.length === 0 && constructedFilters.length > 0) {
+sortRules.push("createdAt:desc");
+}
+
+logger.info(`Searching for ${filteredSearchTerms.join(" ")}`);
 logger.info(`Limit: ${limit}`);
 logger.info(`Offset: ${offset}`);
 logger.info(`Filters: ${constructedFilters}`);
+logger.info(`Ordering: ${sortRules}`);
 
-return posts.search(splitSearch.join(" "), {
+return posts.search(filteredSearchTerms.join(" "), {
 limit: limit,
 offset: offset,
 filter: constructedFilters,
+sort: sortRules,
 });
 },
 ingestNote: async (ingestNotes: Note | Note[]) => {
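A usage sketch of the reworked search function (hypothetical query and values; `me` stands for the calling ILocalUser, as in the endpoint hunk further down). Operator terms are consumed into Meilisearch filters and only the remaining words are sent as query text:

// "cat pictures has:image domain:example.com filter:following" roughly becomes
// a search for "cat pictures" with these filters:
//   mediaAttachment = "image"
//   userHost = example.com
//   userId IN [<ids of accounts the calling user follows>]
const results = await meilisearch.search(
	"cat pictures has:image domain:example.com filter:following",
	10, // limit
	0, // offset
	me, // ILocalUser | null; with null, filter:following/followers only log a warning
);

If every term is an operator (for example a bare "filter:following"), filteredSearchTerms ends up empty, the query becomes a placeholder search, and the new sortRules push createdAt:desc so the newest matching posts come back first.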
@@ -128,12 +207,11 @@ export default hasConfig
 
 for (let note of ingestNotes) {
 if (note.user === undefined) {
-let user = await Users.findOne({
+note.user = await Users.findOne({
 where: {
 id: note.userId,
 },
 });
-note.user = user;
 }
 
 let attachmentType = "";
@@ -166,11 +244,13 @@ export default hasConfig
 });
 }
 
-let indexingIDs = indexingBatch.map((note) => note.id);
-
-return posts.addDocuments(indexingBatch, {
-primaryKey: "id",
-});
+return posts
+.addDocuments(indexingBatch, {
+primaryKey: "id",
+})
+.then(() =>
+console.log(`sent ${indexingBatch.length} posts for indexing`),
+);
 },
 serverStats: async () => {
 let health: Health = await client.health();
@@ -179,7 +179,7 @@ export default define(meta, paramDef, async (ps, me) => {
 // Use meilisearch to fetch and step through all search results that could match the requirements
 const ids = [];
 while (true) {
-const results = await meilisearch.search(ps.query, chunkSize, start);
+const results = await meilisearch.search(ps.query, chunkSize, start, me);
 
 start += chunkSize;
 
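The last hunk is in the notes search endpoint, which now forwards the requesting user (me) so the follow-based filters have a context. A sketch of the chunked paging loop the changed line sits in (the accumulation and exit conditions here are assumptions; only the search call, start, chunkSize, and ids appear in the hunk):

let start = 0;
const chunkSize = 100; // assumed value
const ids: string[] = [];

while (true) {
	// Pull one chunk of hits at a time, passing the requesting user through.
	const results = await meilisearch.search(ps.query, chunkSize, start, me);
	if (results.hits.length === 0) break; // assumed exit condition

	ids.push(...results.hits.map((hit: { id: string }) => hit.id));
	start += chunkSize;
}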