Convert the streaming server to ESM (#29389)

Co-authored-by: Claire <claire.github-309c@sitedethib.com>
Parent: bc4c5ed918
Commit: 036f5a05e3
8 changed files with 65 additions and 78 deletions
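
The conversion applies the same mechanical pattern across all eight files: `require()` calls become `import` statements, `exports.*` / `module.exports` assignments become `export` declarations, and relative specifiers gain an explicit `.js` extension. A minimal before/after sketch of that pattern (the `greet` module is illustrative, not part of the commit):

// before — CommonJS (e.g. lib.cjs)
const { format } = require('node:util');

function greet(name) {
  return format('Hello, %s', name);
}
exports.greet = greet;

// after — ESM (e.g. lib.js)
import { format } from 'node:util';

export function greet(name) {
  return format('Hello, %s', name);
}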

streaming/.eslintrc.cjs (renamed from streaming/.eslintrc.js):

@@ -1,4 +1,8 @@
+/* eslint-disable import/no-commonjs */
+
 // @ts-check
+
+// @ts-ignore - This needs to be a CJS file (eslint does not yet support ESM configs), and TS is complaining we use require
 const { defineConfig } = require('eslint-define-config');
 
 module.exports = defineConfig({
@@ -22,22 +26,18 @@ module.exports = defineConfig({
     // to maintain.
     'no-delete-var': 'off',
 
-    // The streaming server is written in commonjs, not ESM for now:
-    'import/no-commonjs': 'off',
-
     // This overrides the base configuration for this rule to pick up
     // dependencies for the streaming server from the correct package.json file.
     'import/no-extraneous-dependencies': [
      'error',
      {
-        devDependencies: [
-          'streaming/.eslintrc.js',
-        ],
+        devDependencies: ['streaming/.eslintrc.cjs'],
        optionalDependencies: false,
        peerDependencies: false,
        includeTypes: true,
        packageDir: __dirname,
      },
    ],
+    'import/extensions': ['error', 'always'],
   },
 });
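
The new 'import/extensions': ['error', 'always'] rule mirrors what Node's ESM resolver requires anyway: relative imports must spell out the file extension. A minimal sketch of what the rule accepts and rejects (assuming a sibling utils.js such as the one in this directory):

// import { isTruthy } from './utils';  // flagged by import/extensions, and unresolvable under Node ESM
import { isTruthy } from './utils.js';  // explicit extension: accepted

console.log(isTruthy('true'));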

streaming/errors.js:

@@ -5,15 +5,14 @@
  * override it in let statements.
  * @type {string}
  */
-const UNEXPECTED_ERROR_MESSAGE = 'An unexpected error occurred';
-exports.UNKNOWN_ERROR_MESSAGE = UNEXPECTED_ERROR_MESSAGE;
+export const UNEXPECTED_ERROR_MESSAGE = 'An unexpected error occurred';
 
 /**
  * Extracts the status and message properties from the error object, if
  * available for public use. The `unknown` is for catch statements
  * @param {Error | AuthenticationError | RequestError | unknown} err
  */
-exports.extractStatusAndMessage = function(err) {
+export function extractStatusAndMessage(err) {
   let statusCode = 500;
   let errorMessage = UNEXPECTED_ERROR_MESSAGE;
   if (err instanceof AuthenticationError || err instanceof RequestError) {
@@ -22,9 +21,9 @@ exports.extractStatusAndMessage = function(err) {
   }
 
   return { statusCode, errorMessage };
-};
+}
 
-class RequestError extends Error {
+export class RequestError extends Error {
   /**
    * @param {string} message
    */
@@ -35,9 +34,7 @@ class RequestError extends Error {
   }
 }
 
-exports.RequestError = RequestError;
-
-class AuthenticationError extends Error {
+export class AuthenticationError extends Error {
   /**
    * @param {string} message
   */
@@ -47,5 +44,3 @@ class AuthenticationError extends Error {
     this.status = 401;
   }
 }
-
-exports.AuthenticationError = AuthenticationError;
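
With errors.js exposing named ESM exports, consumers import the pieces directly instead of going through an `errors` namespace object (index.js below does exactly this, aliasing extractStatusAndMessage). A minimal usage sketch; the exact statusCode/errorMessage values depend on the body of extractStatusAndMessage, which is only partially shown in this diff:

import { AuthenticationError, extractStatusAndMessage } from './errors.js';

try {
  throw new AuthenticationError('Missing access token');
} catch (err) {
  // Derives an HTTP status and a public-facing message from the error:
  const { statusCode, errorMessage } = extractStatusAndMessage(err);
  console.log(statusCode, errorMessage);
}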

streaming/index.js:

@@ -1,32 +1,36 @@
 // @ts-check
 
-const fs = require('fs');
-const http = require('http');
-const path = require('path');
-const url = require('url');
+import fs from 'node:fs';
+import http from 'node:http';
+import path from 'node:path';
+import url from 'node:url';
 
-const cors = require('cors');
-const dotenv = require('dotenv');
-const express = require('express');
-const { Redis } = require('ioredis');
-const { JSDOM } = require('jsdom');
-const pg = require('pg');
-const dbUrlToConfig = require('pg-connection-string').parse;
-const WebSocket = require('ws');
+import cors from 'cors';
+import dotenv from 'dotenv';
+import express from 'express';
+import { Redis } from 'ioredis';
+import { JSDOM } from 'jsdom';
+import pg from 'pg';
+import pgConnectionString from 'pg-connection-string';
+import WebSocket from 'ws';
 
-const errors = require('./errors');
-const { AuthenticationError, RequestError } = require('./errors');
-const { logger, httpLogger, initializeLogLevel, attachWebsocketHttpLogger, createWebsocketLogger } = require('./logging');
-const { setupMetrics } = require('./metrics');
-const { isTruthy, normalizeHashtag, firstParam } = require("./utils");
+import { AuthenticationError, RequestError, extractStatusAndMessage as extractErrorStatusAndMessage } from './errors.js';
+import { logger, httpLogger, initializeLogLevel, attachWebsocketHttpLogger, createWebsocketLogger } from './logging.js';
+import { setupMetrics } from './metrics.js';
+import { isTruthy, normalizeHashtag, firstParam } from './utils.js';
 
 const environment = process.env.NODE_ENV || 'development';
 
 // Correctly detect and load .env or .env.production file based on environment:
 const dotenvFile = environment === 'production' ? '.env.production' : '.env';
+const dotenvFilePath = path.resolve(
+  url.fileURLToPath(
+    new URL(path.join('..', dotenvFile), import.meta.url)
+  )
+);
 
 dotenv.config({
-  path: path.resolve(__dirname, path.join('..', dotenvFile))
+  path: dotenvFilePath
 });
 
 initializeLogLevel(process.env, environment);
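
The dotenv change works around the fact that `__dirname` does not exist in ES modules: the path to the env file is now derived from `import.meta.url`. The same idea in isolation (a minimal sketch; the '.env' filename here is just an example):

import path from 'node:path';
import url from 'node:url';

// Equivalent of the old path.resolve(__dirname, '..', '.env'):
const envPath = url.fileURLToPath(new URL('../.env', import.meta.url));

// Or rebuild a __dirname-like value first and resolve from it:
const dirname = path.dirname(url.fileURLToPath(import.meta.url));
const envPathAlt = path.resolve(dirname, '..', '.env');

console.log(envPath, envPathAlt);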
@@ -143,7 +147,7 @@ const pgConfigFromEnv = (env) => {
   let baseConfig = {};
 
   if (env.DATABASE_URL) {
-    const parsedUrl = dbUrlToConfig(env.DATABASE_URL);
+    const parsedUrl = pgConnectionString.parse(env.DATABASE_URL);
 
     // The result of dbUrlToConfig from pg-connection-string is not type
     // compatible with pg.PoolConfig, since parts of the connection URL may be
@@ -326,7 +330,7 @@ const startServer = async () => {
     // Unfortunately for using the on('upgrade') setup, we need to manually
     // write a HTTP Response to the Socket to close the connection upgrade
     // attempt, so the following code is to handle all of that.
-    const {statusCode, errorMessage } = errors.extractStatusAndMessage(err);
+    const {statusCode, errorMessage } = extractErrorStatusAndMessage(err);
 
     /** @type {Record<string, string | number | import('pino-http').ReqId>} */
     const headers = {
@@ -748,7 +752,7 @@ const startServer = async () => {
      return;
    }
 
-    const {statusCode, errorMessage } = errors.extractStatusAndMessage(err);
+    const {statusCode, errorMessage } = extractErrorStatusAndMessage(err);
 
    res.writeHead(statusCode, { 'Content-Type': 'application/json' });
    res.end(JSON.stringify({ error: errorMessage }));
@@ -1155,7 +1159,7 @@ const startServer = async () => {
      // @ts-ignore
      streamFrom(channelIds, req, req.log, onSend, onEnd, 'eventsource', options.needsFiltering);
    }).catch(err => {
-      const {statusCode, errorMessage } = errors.extractStatusAndMessage(err);
+      const {statusCode, errorMessage } = extractErrorStatusAndMessage(err);
 
      res.log.info({ err }, 'Eventsource subscription error');
 
@@ -1353,7 +1357,7 @@ const startServer = async () => {
      stopHeartbeat,
    };
   }).catch(err => {
-    const {statusCode, errorMessage } = errors.extractStatusAndMessage(err);
+    const {statusCode, errorMessage } = extractErrorStatusAndMessage(err);
 
    logger.error({ err }, 'Websocket subscription error');
 
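
pg-connection-string is a CommonJS package, so under ESM it is imported as a default object and `parse` is read off it, where the old code grabbed `.parse` straight from `require()`. A minimal usage sketch (the connection URL is made up):

import pgConnectionString from 'pg-connection-string';

const config = pgConnectionString.parse('postgres://mastodon@localhost:5432/mastodon_production');
console.log(config.host, config.port, config.database);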
@@ -1482,13 +1486,15 @@ const startServer = async () => {
      // Decrement the metrics for connected clients:
      connectedClients.labels({ type: 'websocket' }).dec();
 
-      // We need to delete the session object as to ensure it correctly gets
+      // We need to unassign the session object as to ensure it correctly gets
      // garbage collected, without doing this we could accidentally hold on to
      // references to the websocket, the request, and the logger, causing
      // memory leaks.
-      //
-      // @ts-ignore
-      delete session;
+
+      // This is commented out because `delete` only operated on object properties
+      // It needs to be replaced by `session = undefined`, but it requires every calls to
+      // `session` to check for it, thus a significant refactor
+      // delete session;
    });
 
    // Note: immediately after the `error` event is emitted, the `close` event
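
The `delete session;` line had to go because `delete` only removes object properties; applied to a declared variable it has no effect in sloppy mode and is a SyntaxError in strict mode, and ES modules are always strict. The refactor the new comments describe would be reassignment. A minimal sketch (the `session` shape is hypothetical):

let session = { websocket: null, subscriptions: {} };

// In an ES module the following would be a SyntaxError:
// delete session;

// Dropping the reference so the object can be garbage collected:
session = undefined;

As the new comments note, switching to `session = undefined` would force every later use of `session` to handle the undefined case, which is why the commit only comments the line out for now.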

streaming/logging.js:

@@ -1,6 +1,6 @@
-const { pino } = require('pino');
-const { pinoHttp, stdSerializers: pinoHttpSerializers } = require('pino-http');
-const uuid = require('uuid');
+import { pino } from 'pino';
+import { pinoHttp, stdSerializers as pinoHttpSerializers } from 'pino-http';
+import * as uuid from 'uuid';
 
 /**
  * Generates the Request ID for logging and setting on responses
@@ -36,7 +36,7 @@ function sanitizeRequestLog(req) {
   return log;
 }
 
-const logger = pino({
+export const logger = pino({
   name: "streaming",
   // Reformat the log level to a string:
   formatters: {
@@ -59,7 +59,7 @@ const logger = pino({
   }
 });
 
-const httpLogger = pinoHttp({
+export const httpLogger = pinoHttp({
   logger,
   genReqId: generateRequestId,
   serializers: {
@@ -71,7 +71,7 @@ const httpLogger = pinoHttp({
  * Attaches a logger to the request object received by http upgrade handlers
  * @param {http.IncomingMessage} request
  */
-function attachWebsocketHttpLogger(request) {
+export function attachWebsocketHttpLogger(request) {
   generateRequestId(request);
 
   request.log = logger.child({
@@ -84,7 +84,7 @@ function attachWebsocketHttpLogger(request) {
  * @param {http.IncomingMessage} request
  * @param {import('./index.js').ResolvedAccount} resolvedAccount
  */
-function createWebsocketLogger(request, resolvedAccount) {
+export function createWebsocketLogger(request, resolvedAccount) {
   // ensure the request.id is always present.
   generateRequestId(request);
 
@@ -98,17 +98,12 @@ function createWebsocketLogger(request, resolvedAccount) {
   });
 }
 
-exports.logger = logger;
-exports.httpLogger = httpLogger;
-exports.attachWebsocketHttpLogger = attachWebsocketHttpLogger;
-exports.createWebsocketLogger = createWebsocketLogger;
-
 /**
  * Initializes the log level based on the environment
  * @param {Object<string, any>} env
  * @param {string} environment
  */
-exports.initializeLogLevel = function initializeLogLevel(env, environment) {
+export function initializeLogLevel(env, environment) {
   if (env.LOG_LEVEL && Object.keys(logger.levels.values).includes(env.LOG_LEVEL)) {
     logger.level = env.LOG_LEVEL;
   } else if (environment === 'development') {
@@ -116,4 +111,4 @@ exports.initializeLogLevel = function initializeLogLevel(env, environment) {
   } else {
     logger.level = 'info';
   }
-};
+}
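
The uuid require becomes a namespace import, which keeps the old call shape intact; for CommonJS packages without a usable default export the namespace form is the safer translation. A minimal sketch (the v4 call is illustrative, the shown hunks do not include uuid usage):

import * as uuid from 'uuid';

// Same call shape as with `const uuid = require('uuid')`:
const requestId = uuid.v4();
console.log(requestId);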

streaming/metrics.js:

@@ -1,6 +1,6 @@
 // @ts-check
 
-const metrics = require('prom-client');
+import metrics from 'prom-client';
 
 /**
  * @typedef StreamingMetrics
@@ -18,7 +18,7 @@ const metrics = require('prom-client');
  * @param {import('pg').Pool} pgPool
  * @returns {StreamingMetrics}
  */
-function setupMetrics(channels, pgPool) {
+export function setupMetrics(channels, pgPool) {
   // Collect metrics from Node.js
   metrics.collectDefaultMetrics();
 
@@ -101,5 +101,3 @@ function setupMetrics(channels, pgPool) {
     messagesSent,
   };
 }
-
-exports.setupMetrics = setupMetrics;

streaming/package.json:

@@ -7,6 +7,7 @@
   },
   "description": "Mastodon's Streaming Server",
   "private": true,
+  "type": "module",
   "repository": {
     "type": "git",
     "url": "https://github.com/mastodon/mastodon.git"
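
Adding "type": "module" makes Node treat every .js file under streaming/ as an ES module; a file can still opt back into CommonJS via the .cjs extension, which is why the ESLint config above is kept as .eslintrc.cjs. A minimal sketch of the two flavours coexisting (file names are illustrative):

// config.cjs — still CommonJS despite "type": "module" in package.json
module.exports = { retries: 3 };

// main.js — an ES module; CommonJS files can be loaded via createRequire
import { createRequire } from 'node:module';

const require = createRequire(import.meta.url);
const config = require('./config.cjs');
console.log(config.retries);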

streaming/tsconfig.json:

@@ -2,11 +2,11 @@
   "extends": "../tsconfig.json",
   "compilerOptions": {
     "target": "esnext",
-    "module": "CommonJS",
-    "moduleResolution": "node",
+    "module": "NodeNext",
+    "moduleResolution": "NodeNext",
     "noUnusedParameters": false,
     "tsBuildInfoFile": "../tmp/cache/streaming/tsconfig.tsbuildinfo",
     "paths": {},
   },
-  "include": ["./*.js", "./.eslintrc.js"],
+  "include": ["./*.js", "./.eslintrc.cjs"],
 }

streaming/utils.js:

@@ -16,11 +16,9 @@ const FALSE_VALUES = [
  * @param {any} value
  * @returns {boolean}
  */
-const isTruthy = value =>
-  value && !FALSE_VALUES.includes(value);
-
-exports.isTruthy = isTruthy;
-
+export function isTruthy(value) {
+  return value && !FALSE_VALUES.includes(value);
+}
 
 /**
  * See app/lib/ascii_folder.rb for the canon definitions
@@ -33,7 +31,7 @@ const EQUIVALENT_ASCII_CHARS = 'AAAAAAaaaaaaAaAaAaCcCcCcCcCcDdDdDdEEEEeeeeEeEeEe
  * @param {string} str
  * @returns {string}
  */
-function foldToASCII(str) {
+export function foldToASCII(str) {
   const regex = new RegExp(NON_ASCII_CHARS.split('').join('|'), 'g');
 
   return str.replace(regex, function(match) {
@@ -42,28 +40,22 @@ function foldToASCII(str) {
   });
 }
 
-exports.foldToASCII = foldToASCII;
-
 /**
  * @param {string} str
  * @returns {string}
  */
-function normalizeHashtag(str) {
+export function normalizeHashtag(str) {
   return foldToASCII(str.normalize('NFKC').toLowerCase()).replace(/[^\p{L}\p{N}_\u00b7\u200c]/gu, '');
 }
 
-exports.normalizeHashtag = normalizeHashtag;
-
 /**
  * @param {string|string[]} arrayOrString
  * @returns {string}
 */
-function firstParam(arrayOrString) {
+export function firstParam(arrayOrString) {
   if (Array.isArray(arrayOrString)) {
     return arrayOrString[0];
   } else {
     return arrayOrString;
   }
 }
-
-exports.firstParam = firstParam;