chore (backend): revise queue logs (#10907)

This commit is contained in:
naskya 2024-04-25 08:03:38 +09:00
parent bf9ab9c1ca
commit 322b2392de
No known key found for this signature in database
GPG key ID: 712D413B3A9FED5C
25 changed files with 103 additions and 77 deletions
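
For context, the pattern applied throughout these queue processors is to log a short human-readable summary at a routine level (warn/info/debug) and to dump the full error object separately via inspect from node:util, instead of passing the raw error straight to logger.error. A minimal sketch of that pattern, using a hypothetical stand-in logger (the project's actual Logger class is only partially shown at the end of this diff):

import { inspect } from "node:util";

// Hypothetical stand-in for a queueLogger sub-logger; the real class is not reproduced here.
const logger = {
  warn: (message: string) => console.warn(`WARN  ${message}`),
  info: (message: string) => console.info(`INFO  ${message}`),
};

// Same write-a-line-to-the-export-file shape as the export jobs below.
function writeLine(stream: NodeJS.WritableStream, content: string): Promise<void> {
  return new Promise<void>((res, rej) => {
    stream.write(`${content}\n`, (err) => {
      if (err) {
        logger.warn("failed to write an export line"); // short summary for operators
        logger.info(inspect(err)); // full error details at a lower level
        rej(err);
      } else {
        res();
      }
    });
  });
}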

View file

@ -13,7 +13,7 @@ const logger = queueLogger.createSubLogger("delete-account");
export async function deleteAccount(
job: Bull.Job<DbUserDeleteJobData>,
): Promise<string | void> {
logger.info(`Deleting account of ${job.data.user.id} ...`);
logger.info(`Deleting account ${job.data.user.id} ...`);
const user = await Users.findOneBy({ id: job.data.user.id });
if (!user) return;
@ -43,7 +43,7 @@ export async function deleteAccount(
await Notes.delete(notes.map((note) => note.id));
}
logger.info("All of notes deleted");
logger.info(`All posts of user ${job.data.user.id} were deleted`);
}
{
@ -73,7 +73,7 @@ export async function deleteAccount(
}
}
logger.info("All of files deleted");
logger.info(`All files of user ${job.data.user.id} were deleted`);
}
{

View file

@ -54,8 +54,6 @@ export async function deleteDriveFiles(
job.progress(deletedCount / total);
}
logger.info(
`All drive files (${deletedCount}) of ${user.id} has been deleted.`,
);
logger.info(`${deletedCount} drive files of user ${user.id} were deleted.`);
done();
}

View file

@ -9,6 +9,7 @@ import { createTemp } from "@/misc/create-temp.js";
import { Users, Blockings } from "@/models/index.js";
import { MoreThan } from "typeorm";
import type { DbUserJobData } from "@/queue/types.js";
import { inspect } from "node:util";
const logger = queueLogger.createSubLogger("export-blocking");
@ -27,7 +28,7 @@ export async function exportBlocking(
// Create temp file
const [path, cleanup] = await createTemp();
logger.info(`Temp file is ${path}`);
logger.info(`temp file created: ${path}`);
try {
const stream = fs.createWriteStream(path, { flags: "a" });
@ -63,9 +64,10 @@ export async function exportBlocking(
const content = getFullApAccount(u.username, u.host);
await new Promise<void>((res, rej) => {
stream.write(content + "\n", (err) => {
stream.write(`${content}\n`, (err) => {
if (err) {
logger.error(err);
logger.warn("failed");
logger.info(inspect(err));
rej(err);
} else {
res();

View file

@ -29,7 +29,7 @@ export async function exportCustomEmojis(
const [path, cleanup] = await createTempDir();
logger.info(`Temp dir is ${path}`);
logger.info(`temp dir created: ${path}`);
const metaPath = `${path}/meta.json`;
@ -41,7 +41,8 @@ export async function exportCustomEmojis(
return new Promise<void>((res, rej) => {
metaStream.write(text, (err) => {
if (err) {
logger.error(err);
logger.warn("Failed to export custom emojis");
logger.info(inspect(err));
rej(err);
} else {
res();

View file

@ -10,6 +10,7 @@ import { Users, Followings, Mutings } from "@/models/index.js";
import { In, MoreThan, Not } from "typeorm";
import type { DbUserJobData } from "@/queue/types.js";
import type { Following } from "@/models/entities/following.js";
import { inspect } from "node:util";
const logger = queueLogger.createSubLogger("export-following");
@ -28,7 +29,7 @@ export async function exportFollowing(
// Create temp file
const [path, cleanup] = await createTemp();
logger.info(`Temp file is ${path}`);
logger.info(`temp file created: ${path}`);
try {
const stream = fs.createWriteStream(path, { flags: "a" });
@ -78,9 +79,12 @@ export async function exportFollowing(
const content = getFullApAccount(u.username, u.host);
await new Promise<void>((res, rej) => {
stream.write(content + "\n", (err) => {
stream.write(`${content}\n`, (err) => {
if (err) {
logger.error(err);
logger.warn(
`failed to export following users of ${job.data.user.id}`,
);
logger.info(inspect(err));
rej(err);
} else {
res();

View file

@ -9,6 +9,7 @@ import { createTemp } from "@/misc/create-temp.js";
import { Users, Mutings } from "@/models/index.js";
import { IsNull, MoreThan } from "typeorm";
import type { DbUserJobData } from "@/queue/types.js";
import { inspect } from "node:util";
const logger = queueLogger.createSubLogger("export-mute");
@ -16,7 +17,7 @@ export async function exportMute(
job: Bull.Job<DbUserJobData>,
done: any,
): Promise<void> {
logger.info(`Exporting mute of ${job.data.user.id} ...`);
logger.info(`Exporting mutes of ${job.data.user.id} ...`);
const user = await Users.findOneBy({ id: job.data.user.id });
if (user == null) {
@ -27,7 +28,7 @@ export async function exportMute(
// Create temp file
const [path, cleanup] = await createTemp();
logger.info(`Temp file is ${path}`);
logger.info(`temp file created: ${path}`);
try {
const stream = fs.createWriteStream(path, { flags: "a" });
@ -64,9 +65,10 @@ export async function exportMute(
const content = getFullApAccount(u.username, u.host);
await new Promise<void>((res, rej) => {
stream.write(content + "\n", (err) => {
stream.write(`${content}\n`, (err) => {
if (err) {
logger.error(err);
logger.warn("failed");
logger.info(inspect(err));
rej(err);
} else {
res();

View file

@ -10,6 +10,7 @@ import type { Note } from "@/models/entities/note.js";
import type { Poll } from "@/models/entities/poll.js";
import type { DbUserJobData } from "@/queue/types.js";
import { createTemp } from "@/misc/create-temp.js";
import { inspect } from "node:util";
const logger = queueLogger.createSubLogger("export-notes");
@ -28,7 +29,7 @@ export async function exportNotes(
// Create temp file
const [path, cleanup] = await createTemp();
logger.info(`Temp file is ${path}`);
logger.info(`temp file created: ${path}`);
try {
const stream = fs.createWriteStream(path, { flags: "a" });
@ -37,7 +38,8 @@ export async function exportNotes(
return new Promise<void>((res, rej) => {
stream.write(text, (err) => {
if (err) {
logger.error(err);
logger.warn(`failed to export posts of ${job.data.user.id}`);
logger.info(inspect(err));
rej(err);
} else {
res();

View file

@ -9,6 +9,7 @@ import { createTemp } from "@/misc/create-temp.js";
import { Users, UserLists, UserListJoinings } from "@/models/index.js";
import { In } from "typeorm";
import type { DbUserJobData } from "@/queue/types.js";
import { inspect } from "node:util";
const logger = queueLogger.createSubLogger("export-user-lists");
@ -31,7 +32,7 @@ export async function exportUserLists(
// Create temp file
const [path, cleanup] = await createTemp();
logger.info(`Temp file is ${path}`);
logger.info(`temp file created: ${path}`);
try {
const stream = fs.createWriteStream(path, { flags: "a" });
@ -46,9 +47,10 @@ export async function exportUserLists(
const acct = getFullApAccount(u.username, u.host);
const content = `${list.name},${acct}`;
await new Promise<void>((res, rej) => {
stream.write(content + "\n", (err) => {
stream.write(`${content}\n`, (err) => {
if (err) {
logger.error(err);
logger.warn(`failed to export ${list.id}`);
logger.info(inspect(err));
rej(err);
} else {
res();

View file

@ -66,11 +66,12 @@ export async function importBlocking(
// skip myself
if (target.id === job.data.user.id) continue;
logger.info(`Block[${linenum}] ${target.id} ...`);
logger.debug(`Block[${linenum}] ${target.id} ...`);
await block(user, target);
} catch (e) {
logger.warn(`Error in line ${linenum}:\n${inspect(e)}`);
logger.warn(`Error in line ${linenum}`);
logger.info(inspect(e));
}
}

View file

@ -32,7 +32,7 @@ export async function importCustomEmojis(
const [tempPath, cleanup] = await createTempDir();
logger.info(`Temp dir is ${tempPath}`);
logger.debug(`temp dir created: ${tempPath}`);
const destPath = `${tempPath}/emojis.zip`;

View file

@ -49,7 +49,7 @@ export async function importCkPost(
});
files.push(file);
} catch (e) {
logger.error(`Skipped adding file to drive: ${url}`);
logger.info(`Skipped adding file to drive: ${url}`);
}
}
const { text, cw, localOnly, createdAt, visibility } = Post.parse(post);
@ -71,12 +71,12 @@ export async function importCkPost(
fileIds: note.fileIds,
updatedAt: new Date(),
});
logger.info(`Note file updated`);
logger.info("Post updated");
}
if (!note) {
note = await create(user, {
createdAt: createdAt,
files: files.length == 0 ? undefined : files,
files: files.length === 0 ? undefined : files,
poll: undefined,
text: text || undefined,
reply: post.replyId ? job.data.parent : null,
@ -90,9 +90,9 @@ export async function importCkPost(
apHashtags: undefined,
apEmojis: undefined,
});
logger.info(`Create new note`);
logger.debug("New post has been created");
} else {
logger.info(`Note exist`);
logger.info("This post already exists");
}
logger.info("Imported");
if (post.childNotes) {

View file

@ -64,11 +64,12 @@ export async function importFollowing(
// skip myself
if (target.id === job.data.user.id) continue;
logger.info(`Follow[${linenum}] ${target.id} ...`);
logger.debug(`Follow[${linenum}] ${target.id} ...`);
follow(user, target);
} catch (e) {
logger.warn(`Error in line ${linenum}:\n${inspect(e)}`);
logger.warn(`Error in line ${linenum}`);
logger.info(inspect(e));
}
}
} else {
@ -102,11 +103,12 @@ export async function importFollowing(
// skip myself
if (target.id === job.data.user.id) continue;
logger.info(`Follow[${linenum}] ${target.id} ...`);
logger.debug(`Follow[${linenum}] ${target.id} ...`);
follow(user, target);
} catch (e) {
logger.warn(`Error in line ${linenum}:\n${inspect(e)}`);
logger.warn(`Error in line ${linenum}`);
logger.info(inspect(e));
}
}
}

View file

@ -73,7 +73,7 @@ export async function importMastoPost(
});
files.push(file);
} catch (e) {
logger.error(`Skipped adding file to drive: ${url}`);
logger.warn(`Skipped adding file to drive: ${url}`);
}
}
}
@ -97,14 +97,14 @@ export async function importMastoPost(
fileIds: note.fileIds,
updatedAt: new Date(),
});
logger.info(`Note file updated`);
logger.info("Post updated");
}
if (!note) {
note = await create(user, {
createdAt: isRenote
? new Date(post.published)
: new Date(post.object.published),
files: files.length == 0 ? undefined : files,
files: files.length === 0 ? undefined : files,
poll: undefined,
text: text || undefined,
reply,
@ -118,9 +118,9 @@ export async function importMastoPost(
apHashtags: undefined,
apEmojis: undefined,
});
logger.info(`Create new note`);
logger.debug("New post has been created");
} else {
logger.info(`Note exist`);
logger.info("This post already exists");
}
job.progress(100);
done();

View file

@ -66,11 +66,12 @@ export async function importMuting(
// skip myself
if (target.id === job.data.user.id) continue;
logger.info(`Mute[${linenum}] ${target.id} ...`);
logger.debug(`Mute[${linenum}] ${target.id} ...`);
await mute(user, target);
} catch (e) {
logger.warn(`Error in line ${linenum}: ${inspect(e)}`);
logger.warn(`Error in line ${linenum}`);
logger.info(inspect(e));
}
}

View file

@ -45,7 +45,8 @@ export async function importPosts(
}
} catch (e) {
// handle error
logger.warn(`Failed to read Mastodon archive:\n${inspect(e)}`);
logger.warn("Failed to read Mastodon archive");
logger.info(inspect(e));
}
logger.info("Mastodon archive imported");
done();
@ -56,21 +57,22 @@ export async function importPosts(
try {
const parsed = JSON.parse(json);
if (parsed instanceof Array) {
logger.info("Parsing key style posts");
if (Array.isArray(parsed)) {
logger.info("Parsing *key posts");
const arr = recreateChain(parsed);
for (const post of arr) {
createImportCkPostJob(job.data.user, post, job.data.signatureCheck);
}
} else if (parsed instanceof Object) {
logger.info("Parsing animal style posts");
logger.info("Parsing Mastodon posts");
for (const post of parsed.orderedItems) {
createImportMastoPostJob(job.data.user, post, job.data.signatureCheck);
}
}
} catch (e) {
// handle error
logger.warn(`Error occured while reading:\n${inspect(e)}`);
logger.warn("an error occured while reading");
logger.info(inspect(e));
}
logger.info("Imported");

View file

@ -86,7 +86,8 @@ export async function importUserLists(
pushUserToUserList(target, list!);
} catch (e) {
logger.warn(`Error in line ${linenum}:\n${inspect(e)}`);
logger.warn(`Error in line ${linenum}`);
logger.info(inspect(e));
}
}

View file

@ -48,6 +48,6 @@ export default async function cleanRemoteFiles(
job.progress(deletedCount / total);
}
logger.info("All cached remote files has been deleted.");
logger.info("All cached remote files are deleted.");
done();
}

View file

@ -4,7 +4,7 @@ import { UserIps } from "@/models/index.js";
import { queueLogger } from "../../logger.js";
const logger = queueLogger.createSubLogger("clean");
const logger = queueLogger.createSubLogger("clean-user-ip-log");
export async function clean(
job: Bull.Job<Record<string, unknown>>,

View file

@ -27,9 +27,8 @@ export async function setLocalEmojiSizes(
height: size.height || null,
});
} catch (e) {
logger.error(
`Unable to set emoji size (${i + 1}/${emojis.length}):\n${inspect(e)}`,
);
logger.warn(`Unable to set emoji size (${i + 1}/${emojis.length})`);
logger.info(inspect(e));
/* skip if any error happens */
} finally {
// wait for 1sec so that this would not overwhelm the object storage.

View file

@ -33,7 +33,8 @@ export async function verifyLinks(
fields: user.fields,
});
} catch (e) {
logger.error(`Failed to update user ${user.userId}:\n${inspect(e)}`);
logger.error(`Failed to update user ${user.userId}`);
logger.info(inspect(e));
done(e);
}
}

View file

@ -42,9 +42,11 @@ const router = new Router();
//#region Routing
const inboxLogger = serverLogger.createSubLogger("inbox");
function inbox(ctx: Router.RouterContext) {
if (ctx.req.headers.host !== config.host) {
serverLogger.warn("inbox: Invalid Host");
inboxLogger.warn(`rejecting invalid host (${ctx.req.headers.host})`);
ctx.status = 400;
ctx.message = "Invalid Host";
return;
@ -57,7 +59,6 @@ function inbox(ctx: Router.RouterContext) {
headers: ["(request-target)", "digest", "host", "date"],
});
} catch (e) {
serverLogger.warn(`inbox: signature parse error:\n${inspect(e)}`);
ctx.status = 401;
if (e instanceof Error) {
@ -67,6 +68,9 @@ function inbox(ctx: Router.RouterContext) {
ctx.message = "Missing Required Header";
}
inboxLogger.info(`signature parse error: ${ctx.message}`);
inboxLogger.debug(inspect(e));
return;
}
@ -76,8 +80,8 @@ function inbox(ctx: Router.RouterContext) {
.toLowerCase()
.match(/^((dsa|rsa|ecdsa)-(sha256|sha384|sha512)|ed25519-sha512|hs2019)$/)
) {
serverLogger.warn(
`inbox: invalid signature algorithm ${signature.algorithm}`,
inboxLogger.info(
`rejecting signature: unknown algorithm (${signature.algorithm})`,
);
ctx.status = 401;
ctx.message = "Invalid Signature Algorithm";
@ -92,8 +96,8 @@ function inbox(ctx: Router.RouterContext) {
const digest = ctx.req.headers.digest;
if (typeof digest !== "string") {
serverLogger.warn(
"inbox: zero or more than one digest header(s) are present",
inboxLogger.info(
"rejecting invalid signature: zero or more than one digest header(s)",
);
ctx.status = 401;
ctx.message = "Invalid Digest Header";
@ -103,7 +107,7 @@ function inbox(ctx: Router.RouterContext) {
const match = digest.match(/^([0-9A-Za-z-]+)=(.+)$/);
if (match == null) {
serverLogger.warn("inbox: unrecognized digest header");
inboxLogger.info("rejecting signature: unrecognized digest header");
ctx.status = 401;
ctx.message = "Invalid Digest Header";
return;
@ -113,7 +117,7 @@ function inbox(ctx: Router.RouterContext) {
const expectedDigest = match[2];
if (digestAlgo.toUpperCase() !== "SHA-256") {
serverLogger.warn("inbox: unsupported digest algorithm");
inboxLogger.info("rejecting signature: unsupported digest algorithm");
ctx.status = 401;
ctx.message = "Unsupported Digest Algorithm";
return;
@ -125,7 +129,7 @@ function inbox(ctx: Router.RouterContext) {
.digest("base64");
if (expectedDigest !== actualDigest) {
serverLogger.warn("inbox: Digest Mismatch");
inboxLogger.info("rejecting invalid signature: Digest Mismatch");
ctx.status = 401;
ctx.message = "Digest Missmatch";
return;
@ -215,7 +219,9 @@ router.get("/notes/:note", async (ctx, next) => {
serverLogger.debug(JSON.stringify(remoteUser, null, 2));
if (remoteUser == null) {
serverLogger.debug("Rejecting: no user");
serverLogger.info(
"rejecting fetch attempt of private post: no authentication",
);
ctx.status = 401;
return;
}
@ -225,14 +231,14 @@ router.get("/notes/:note", async (ctx, next) => {
serverLogger.debug(JSON.stringify(relation, null, 2));
if (!relation.isFollowing || relation.isBlocked) {
serverLogger.debug(
"Rejecting: authenticated user is not following us or was blocked by us",
serverLogger.info(
"rejecting fetch attempt of private post: user is not a follower or is blocked",
);
ctx.status = 403;
return;
}
serverLogger.debug("Accepting: access criteria met");
serverLogger.debug("accepting fetch attempt of private post");
}
ctx.body = renderActivity(await renderNote(note, false));
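
As a side reference, a self-contained sketch of the Digest header validation that the inbox hunks above adjust the logging around; the helper name and shape here are illustrative, not the project's actual code:

import { createHash } from "node:crypto";

// Returns an HTTP status/message pair on failure (mirroring the inbox handler's responses),
// or null when the digest matches the raw request body.
function checkDigest(
  digestHeader: unknown,
  rawBody: string,
): { status: number; message: string } | null {
  if (typeof digestHeader !== "string") {
    // zero or more than one digest header(s) are present
    return { status: 401, message: "Invalid Digest Header" };
  }
  const match = digestHeader.match(/^([0-9A-Za-z-]+)=(.+)$/);
  if (match == null) {
    return { status: 401, message: "Invalid Digest Header" };
  }
  const [, algo, expectedDigest] = match;
  if (algo.toUpperCase() !== "SHA-256") {
    return { status: 401, message: "Unsupported Digest Algorithm" };
  }
  const actualDigest = createHash("sha256").update(rawBody).digest("base64");
  if (expectedDigest !== actualDigest) {
    return { status: 401, message: "Digest Mismatch" };
  }
  return null;
}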

View file

@ -28,7 +28,7 @@ const MAX_BYTE_RANGES = 10;
const commonReadableHandlerGenerator =
(ctx: Koa.Context) =>
(e: Error): void => {
serverLogger.error(e);
serverLogger.warn(e);
ctx.status = 500;
ctx.set("Cache-Control", "max-age=300");
};
@ -109,7 +109,8 @@ export default async function (ctx: Koa.Context) {
);
ctx.set("Cache-Control", "max-age=31536000, immutable");
} catch (e) {
serverLogger.error(`${inspect(e)}`);
serverLogger.warn(`failed to fetch/convert ${file.uri}`);
serverLogger.debug(inspect(e));
if (e instanceof StatusError && e.isClientError) {
ctx.status = e.statusCode;

View file

@ -52,7 +52,7 @@ if (!["production", "test"].includes(process.env.NODE_ENV || "")) {
// Logger
app.use(
koaLogger((str) => {
serverLogger.info(str);
serverLogger.debug(str);
}),
);
@ -177,7 +177,7 @@ mastoRouter.post("/oauth/token", async (ctx) => {
console.log(body.code, token);
token = body.code;
}
if (client_id instanceof Array) {
if (Array.isArray(client_id)) {
client_id = client_id.toString();
} else if (!client_id) {
client_id = null;
@ -194,10 +194,10 @@ mastoRouter.post("/oauth/token", async (ctx) => {
scope: body.scope || "read write follow push",
created_at: Math.floor(Date.now() / 1000),
};
serverLogger.info("token-response", ret);
serverLogger.debug("Mastodon API token response", ret);
ctx.body = ret;
} catch (err: any) {
serverLogger.error(inspect(err));
serverLogger.warn(err);
ctx.status = 401;
ctx.body = err.response.data;
}
@ -246,7 +246,7 @@ export default () =>
);
break;
default:
serverLogger.error(e);
serverLogger.error(inspect(e));
break;
}

View file

@ -131,7 +131,8 @@ export async function proxyMedia(ctx: Koa.Context) {
ctx.set("Cache-Control", "max-age=31536000, immutable");
ctx.body = image.data;
} catch (e) {
serverLogger.error(`${inspect(e)}`);
serverLogger.warn(`failed to proxy ${url}`);
serverLogger.debug(inspect(e));
if (e instanceof StatusError && (e.statusCode === 302 || e.isClientError)) {
ctx.status = e.statusCode;

View file

@ -103,9 +103,9 @@ export default class Logger {
: level === "warning"
? chalk.yellow("WARN")
: level === "info"
? chalk.green("INFO")
? chalk.cyan("INFO")
: level === "debug"
? chalk.blue("DEBUG")
? chalk.green("DEBUG")
: level === "trace"
? chalk.gray("TRACE")
: null;