aboutsummaryrefslogtreecommitdiff
path: root/src/lib/kafka.ts
blob: 1d60e1f2818de866655081b59100881e2e98eb82 (plain) (blame)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
import type * as tls from 'node:tls';
import debug from 'debug';
import { Kafka, logLevel, type Producer, type RecordMetadata, type SASLOptions } from 'kafkajs';
import { serializeError } from 'serialize-error';
import { KAFKA, KAFKA_PRODUCER } from '@/lib/db';

// Namespaced logger; enable with DEBUG=umami:kafka.
const log = debug('umami:kafka');
// Max time (ms) to wait while establishing a broker connection.
const CONNECT_TIMEOUT = 5000;
// Max time (ms) to wait for a produce request to complete.
const SEND_TIMEOUT = 3000;
// acks=1: wait for the partition leader to acknowledge, not all replicas.
const ACKS = 1;

// Lazily-initialized singletons; populated by connect() on first use.
let kafka: Kafka;
let producer: Producer;
// Kafka support is on only when both env vars are set at module-load time.
const enabled = Boolean(process.env.KAFKA_URL && process.env.KAFKA_BROKER);

/**
 * Builds the Kafka client from environment configuration.
 *
 * KAFKA_URL supplies SASL credentials via its URL username/password parts,
 * KAFKA_BROKER supplies a comma-separated broker list, and
 * KAFKA_SASL_MECHANISM optionally selects the SASL mechanism (default 'plain').
 *
 * @returns a configured kafkajs Kafka client (also cached on globalThis in
 *          non-production so dev hot-reloads reuse the same instance).
 */
function getClient() {
  const url = new URL(process.env.KAFKA_URL);
  // WHATWG URL exposes username/password percent-encoded; decode them so
  // credentials containing '@', ':', '%', etc. authenticate correctly.
  const username = decodeURIComponent(url.username);
  const password = decodeURIComponent(url.password);
  const brokers = process.env.KAFKA_BROKER.split(',');
  const mechanism =
    (process.env.KAFKA_SASL_MECHANISM as 'plain' | 'scram-sha-256' | 'scram-sha-512') || 'plain';

  // With credentials present, connect over TLS with SASL auth; otherwise plain.
  const security: { ssl?: tls.ConnectionOptions | boolean; sasl?: SASLOptions } =
    username && password
      ? {
          ssl: {
            // NOTE(security): certificate verification is disabled, which
            // allows man-in-the-middle attacks. Consider making this
            // configurable and defaulting to verification on.
            rejectUnauthorized: false,
          },
          sasl: {
            mechanism,
            username,
            password,
          },
        }
      : {};

  const client: Kafka = new Kafka({
    clientId: 'umami',
    brokers,
    connectionTimeout: CONNECT_TIMEOUT,
    logLevel: logLevel.ERROR,
    ...security,
  });

  // Cache on globalThis in dev so hot reloads don't leak extra clients.
  if (process.env.NODE_ENV !== 'production') {
    globalThis[KAFKA] = client;
  }

  log('Kafka initialized');

  return client;
}

async function getProducer(): Promise<Producer> {
  const producer = kafka.producer();
  await producer.connect();

  if (process.env.NODE_ENV !== 'production') {
    globalThis[KAFKA_PRODUCER] = producer;
  }

  log('Kafka producer initialized');

  return producer;
}

/**
 * Serializes one message (or a batch) as JSON and sends it to the topic.
 *
 * Connects lazily on first use. Delivery is best-effort: any failure is
 * logged and swallowed, in which case `undefined` is returned instead of
 * record metadata — the return type now reflects that.
 *
 * @param topic   destination Kafka topic.
 * @param message a single record or an array of records to serialize.
 * @returns broker record metadata, or `undefined` on failure.
 */
async function sendMessage(
  topic: string,
  message: Record<string, string | number> | Record<string, string | number>[],
): Promise<RecordMetadata[] | undefined> {
  try {
    await connect();

    // Normalize both call shapes to a batch so there is one send path.
    const batch = Array.isArray(message) ? message : [message];

    // `return await` (not bare `return`) so a rejected send promise is
    // caught below; without the await, rejections escape this try/catch.
    return await producer.send({
      topic,
      messages: batch.map(item => ({ value: JSON.stringify(item) })),
      timeout: SEND_TIMEOUT,
      acks: ACKS,
    });
  } catch (e) {
    // Best-effort delivery: log (serialized so nested causes survive
    // console output) and drop rather than crash the caller.
    // eslint-disable-next-line no-console
    console.log('KAFKA ERROR:', serializeError(e));
  }
}

/**
 * Lazily initializes the module-level client and producer on first call.
 *
 * When Kafka is not configured (env vars missing) the module state stays
 * uninitialized and the returned value is undefined — same behavior as
 * before, but without assigning a non-Kafka value to the typed variable.
 *
 * @returns the shared Kafka client, or undefined when Kafka is disabled.
 */
async function connect(): Promise<Kafka> {
  if (!kafka) {
    // Only initialize when both env vars are present; prefer the dev-cached
    // instances on globalThis so hot reloads reuse existing connections.
    if (process.env.KAFKA_URL && process.env.KAFKA_BROKER) {
      kafka = globalThis[KAFKA] || getClient();
      producer = globalThis[KAFKA_PRODUCER] || (await getProducer());
    }
  }

  return kafka;
}

export default {
  enabled,
  client: kafka,
  producer,
  log,
  connect,
  sendMessage,
};