
Consolidate .env files. More work on dev containers.

Dragory 2022-06-26 14:34:54 +03:00
parent 2a959f354c
commit 3773d659cc
17 changed files with 137 additions and 106 deletions

View file

@@ -1,4 +1,4 @@
-ENCRYPTION_KEY=32_character_encryption_key
+KEY=32_character_encryption_key
 CLIENT_ID=
 CLIENT_SECRET=
@@ -7,26 +7,37 @@ BOT_TOKEN=
 OAUTH_CALLBACK_URL=
 DASHBOARD_DOMAIN=
 API_DOMAIN=
-PORT=443
+API_PORT=3000
+#
+# DOCKER (DEVELOPMENT)
+#
+DOCKER_WEB_PORT=443
 # The MySQL database running in the container is exposed to the host on this port,
 # allowing access with database tools such as DBeaver
-MYSQL_PORT=3001
+DOCKER_MYSQL_PORT=3001
 # Password for the Zeppelin database user
-MYSQL_PASSWORD=
+DOCKER_MYSQL_PASSWORD=
 # Password for the MySQL root user
-MYSQL_ROOT_PASSWORD=
+DOCKER_MYSQL_ROOT_PASSWORD=
 # The development environment container has an SSH server that you can connect to.
 # This is the port that server is exposed to the host on.
-DEVELOPMENT_SSH_PORT=3002
+DOCKER_DEV_SSH_PORT=3002
+DOCKER_DEV_SSH_PASSWORD=password
 # Only required if relevant feature is used
 #PHISHERMAN_API_KEY=
+#
+# PRODUCTION
+#
 # In production, the newest code is pulled from a repository
 # Specify that repository URL here
-PRODUCTION_REPOSITORY=https://github.com/ZeppelinBot/Zeppelin.git
+#PRODUCTION_REPOSITORY=https://github.com/ZeppelinBot/Zeppelin.git
 # You only need to set these if you're running an external database.
 # In a standard setup, the database is run in a docker container.

View file

@@ -9,6 +9,7 @@ import { ApiPermissionAssignments } from "../data/ApiPermissionAssignments";
 import { ApiUserInfo } from "../data/ApiUserInfo";
 import { ApiUserInfoData } from "../data/entities/ApiUserInfo";
 import { ok } from "./responses";
+import { env } from "../env";
 interface IPassportApiUser {
   apiKey: string;
@@ -54,22 +55,6 @@ function simpleDiscordAPIRequest(bearerToken, path): Promise<any> {
 export function initAuth(app: express.Express) {
   app.use(passport.initialize());
-  if (!process.env.CLIENT_ID) {
-    throw new Error("Auth: CLIENT ID missing");
-  }
-  if (!process.env.CLIENT_SECRET) {
-    throw new Error("Auth: CLIENT SECRET missing");
-  }
-  if (!process.env.OAUTH_CALLBACK_URL) {
-    throw new Error("Auth: OAUTH CALLBACK URL missing");
-  }
-  if (!process.env.DASHBOARD_URL) {
-    throw new Error("DASHBOARD_URL missing!");
-  }
   passport.serializeUser((user, done) => done(null, user));
   passport.deserializeUser((user, done) => done(null, user));
@@ -101,9 +86,9 @@ export function initAuth(app: express.Express) {
     {
       authorizationURL: "https://discord.com/api/oauth2/authorize",
       tokenURL: "https://discord.com/api/oauth2/token",
-      clientID: process.env.CLIENT_ID,
+      clientID: env.CLIENT_ID,
-      clientSecret: process.env.CLIENT_SECRET,
+      clientSecret: env.CLIENT_SECRET,
-      callbackURL: process.env.OAUTH_CALLBACK_URL,
+      callbackURL: env.OAUTH_CALLBACK_URL,
       scope: ["identify"],
     },
     async (accessToken, refreshToken, profile, cb) => {
@@ -132,9 +117,9 @@ export function initAuth(app: express.Express) {
     passport.authenticate("oauth2", { failureRedirect: "/", session: false }),
     (req: Request, res: Response) => {
       if (req.user && req.user.apiKey) {
-        res.redirect(`${process.env.DASHBOARD_URL}/login-callback/?apiKey=${req.user.apiKey}`);
+        res.redirect(`https://${env.DASHBOARD_DOMAIN}/login-callback/?apiKey=${req.user.apiKey}`);
       } else {
-        res.redirect(`${process.env.DASHBOARD_URL}/login-callback/?error=noAccess`);
+        res.redirect(`https://${env.DASHBOARD_DOMAIN}/login-callback/?error=noAccess`);
       }
     },
   );

View file

@@ -1,8 +1,8 @@
 import { connect } from "../data/db";
 import { setIsAPI } from "../globals";
-import "./loadEnv";
+import { apiEnv } from "./loadApiEnv";
-if (!process.env.KEY) {
+if (!apiEnv.KEY) {
   // tslint:disable-next-line:no-console
   console.error("Project root .env with KEY is required!");
   process.exit(1);

View file

@@ -1,4 +0,0 @@
-import path from "path";
-require("dotenv").config({ path: path.resolve(process.cwd(), "../.env") });
-require("dotenv").config({ path: path.resolve(process.cwd(), "api.env") });

View file

@@ -8,12 +8,13 @@ import { initGuildsAPI } from "./guilds/index";
 import { clientError, error, notFound } from "./responses";
 import { startBackgroundTasks } from "./tasks";
 import multer from "multer";
+import { env } from "../env";
 const app = express();
 app.use(
   cors({
-    origin: process.env.DASHBOARD_URL,
+    origin: `https://${env.DASHBOARD_DOMAIN}`,
   }),
 );
 app.use(
@@ -48,7 +49,7 @@ app.use((req, res, next) => {
   return notFound(res);
 });
-const port = (process.env.PORT && parseInt(process.env.PORT, 10)) || 3000;
+const port = env.API_PORT;
 app.listen(port, "0.0.0.0", () => console.log(`API server listening on port ${port}`)); // tslint:disable-line
 startBackgroundTasks();

View file

@@ -6,9 +6,10 @@ import { DAYS, DBDateFormat, HOURS, MINUTES } from "../utils";
 import moment from "moment-timezone";
 import { PhishermanKeyCacheEntry } from "./entities/PhishermanKeyCacheEntry";
 import crypto from "crypto";
+import { env } from "../env";
 const API_URL = "https://api.phisherman.gg";
-const MASTER_API_KEY = process.env.PHISHERMAN_API_KEY;
+const MASTER_API_KEY = env.PHISHERMAN_API_KEY;
 let caughtDomainTrackingMap: Map<string, Map<string, number[]>> = new Map();

backend/src/env.ts (new file, 44 lines added)
View file

@@ -0,0 +1,44 @@
+import path from "path";
+import fs from "fs";
+import dotenv from "dotenv";
+import { rootDir } from "./paths";
+import { z } from "zod";
+const envType = z.object({
+  KEY: z.string().length(32),
+  CLIENT_ID: z.string(),
+  CLIENT_SECRET: z.string(),
+  BOT_TOKEN: z.string(),
+  OAUTH_CALLBACK_URL: z.string().url(),
+  DASHBOARD_DOMAIN: z.string(),
+  API_DOMAIN: z.string(),
+  STAFF: z.preprocess((v) => String(v).split(","), z.array(z.string())).optional(),
+  PHISHERMAN_API_KEY: z.string().optional(),
+  API_PORT: z.number().min(1).max(65535),
+  DOCKER_MYSQL_PASSWORD: z.string().optional(), // Included here for the DB_PASSWORD default in development
+  DB_HOST: z.string().optional().default("mysql"),
+  DB_PORT: z.number().optional().default(3306),
+  DB_USER: z.string().optional().default("zeppelin"),
+  DB_PASSWORD: z.string().optional(), // Default is set to DOCKER_MYSQL_PASSWORD further below
+  DB_DATABASE: z.string().optional().default("zeppelin"),
+});
+let toValidate = {};
+const envPath = path.join(rootDir, "../.env");
+if (fs.existsSync(envPath)) {
+  const buf = fs.readFileSync(envPath);
+  toValidate = dotenv.parse(buf);
+}
+export const env = envType.parse(toValidate);
+if (env.DOCKER_MYSQL_PASSWORD && !env.DB_PASSWORD) {
+  env.DB_PASSWORD = env.DOCKER_MYSQL_PASSWORD;
+}
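The loader reads the root .env once and validates everything in a single place. One thing worth noting if you reuse this pattern: dotenv.parse() only ever produces strings, so plain z.number() fields generally need a coercion step (like the preprocess used for STAFF above) before they validate. A standalone sketch under that assumption, with illustrative names that are not part of the commit:

import fs from "fs";
import dotenv from "dotenv";
import { z } from "zod";

// Illustrative schema, not the project's: numeric values coming out of
// dotenv are strings, so they are converted before z.number() runs.
const schema = z.object({
  API_PORT: z.preprocess((v) => Number(v), z.number().int().min(1).max(65535)),
  DB_HOST: z.string().optional().default("mysql"),
});

const raw = fs.existsSync(".env") ? dotenv.parse(fs.readFileSync(".env")) : {};
const env = schema.parse(raw); // throws a ZodError listing every failing key

console.log(env.API_PORT + 1); // typed as number, e.g. 3001
console.log(env.DB_HOST);      // "mysql" unless overridden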

View file

@@ -10,7 +10,6 @@ import { connect } from "./data/db";
 import { GuildLogs } from "./data/GuildLogs";
 import { LogType } from "./data/LogType";
 import { DiscordJSError } from "./DiscordJSError";
-import "./loadEnv";
 import { logger } from "./logger";
 import { baseGuildPlugins, globalPlugins, guildPlugins } from "./plugins/availablePlugins";
 import { RecoverablePluginError } from "./RecoverablePluginError";
@@ -37,12 +36,7 @@ import { runPhishermanCacheCleanupLoop, runPhishermanReportingLoop } from "./dat
 import { hasPhishermanMasterAPIKey } from "./data/Phisherman";
 import { consumeQueryStats } from "./data/queryLogger";
 import { EventEmitter } from "events";
+import { env } from "./env";
-if (!process.env.KEY) {
-  // tslint:disable-next-line:no-console
-  console.error("Project root .env with KEY is required!");
-  process.exit(1);
-}
 // Error handling
 let recentPluginErrors = 0;
@@ -413,5 +407,5 @@ connect().then(async () => {
   bot.initialize();
   logger.info("Bot Initialized");
   logger.info("Logging in...");
-  await client.login(process.env.TOKEN);
+  await client.login(env.BOT_TOKEN);
 });

View file

@@ -1,4 +0,0 @@
-import path from "path";
-require("dotenv").config({ path: path.resolve(process.cwd(), "../.env") });
-require("dotenv").config({ path: path.resolve(process.cwd(), "bot.env") });

View file

@@ -4,8 +4,6 @@ import { LogType } from "../../../data/LogType";
 import { noop } from "../../../utils";
 import { automodAction } from "../helpers";
-const cleanDebugServer = process.env.TEMP_CLEAN_DEBUG_SERVER;
 export const CleanAction = automodAction({
   configType: t.boolean,
   defaultConfig: false,
@@ -29,26 +27,13 @@ export const CleanAction = automodAction({
       }
     }
-    if (pluginData.guild.id === cleanDebugServer) {
-      const toDeleteFormatted = Array.from(messageIdsToDeleteByChannelId.entries())
-        .map(([channelId, messageIds]) => `- ${channelId}: ${messageIds.join(", ")}`)
-        .join("\n");
-      // tslint:disable-next-line:no-console
-      console.log(`[DEBUG] Cleaning messages (${ruleName}):\n${toDeleteFormatted}`);
-    }
     for (const [channelId, messageIds] of messageIdsToDeleteByChannelId.entries()) {
       for (const id of messageIds) {
         pluginData.state.logs.ignoreLog(LogType.MESSAGE_DELETE, id);
       }
       const channel = pluginData.guild.channels.cache.get(channelId as Snowflake) as TextChannel;
-      await channel.bulkDelete(messageIds as Snowflake[]).catch((err) => {
+      await channel.bulkDelete(messageIds as Snowflake[]).catch(noop);
-        if (pluginData.guild.id === cleanDebugServer) {
-          // tslint:disable-next-line:no-console
-          console.error(`[DEBUG] Failed to bulk delete messages (${ruleName}): ${err}`);
-        }
-      });
     }
   },
 });

View file

@@ -1,6 +1,8 @@
+import { env } from "./env";
 /**
  * Zeppelin staff have full access to the dashboard
  */
 export function isStaff(userId: string) {
-  return (process.env.STAFF ?? "").split(",").includes(userId);
+  return (env.STAFF ?? []).includes(userId);
 }
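This works because the new env.ts already turns the comma-separated STAFF value into an array during validation, so isStaff no longer parses the raw string on every call. A small standalone sketch of that preprocess step and the resulting lookup (IDs are made up, and the helper signature is illustrative):

import { z } from "zod";

// Same preprocess pattern as env.ts: a comma-separated string becomes string[].
const staffSchema = z.preprocess((v) => String(v).split(","), z.array(z.string())).optional();

const staff = staffSchema.parse("1111,2222"); // ["1111", "2222"]

// The staff check then reduces to a plain array lookup, with no string parsing per call.
function isStaff(userId: string, staffIds: string[] | undefined): boolean {
  return (staffIds ?? []).includes(userId);
}

console.log(isStaff("2222", staff));     // true
console.log(isStaff("3333", undefined)); // false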

View file

@@ -1,21 +1,14 @@
 import { spawn, Worker, Pool } from "threads";
-import "../loadEnv";
 import type { CryptFns } from "./cryptWorker";
 import { MINUTES } from "../utils";
+import { env } from "../env";
-if (!process.env.KEY) {
-  // tslint:disable-next-line:no-console
-  console.error("Environment value KEY required for encryption");
-  process.exit(1);
-}
-const KEY = process.env.KEY;
 const pool = Pool(() => spawn(new Worker("./cryptWorker"), { timeout: 10 * MINUTES }), 8);
 export async function encrypt(data: string) {
-  return pool.queue((w) => w.encrypt(data, KEY));
+  return pool.queue((w) => w.encrypt(data, env.KEY));
 }
 export async function decrypt(data: string) {
-  return pool.queue((w) => w.decrypt(data, KEY));
+  return pool.queue((w) => w.decrypt(data, env.KEY));
 }
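With the key now sourced from the validated env object instead of a local startup check, callers of the pool are unchanged. A minimal round-trip usage sketch (the import path is illustrative):

import { encrypt, decrypt } from "./crypt"; // illustrative path to the module above

async function roundTrip(): Promise<void> {
  const ciphertext = await encrypt("hello world"); // queued onto the 8-worker pool
  const plaintext = await decrypt(ciphertext);
  console.log(plaintext === "hello world"); // true
}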

View file

@@ -1,4 +1,4 @@
-require("dotenv").config();
+require("dotenv").config({ path: path.resolve(process.cwd(), "../.env") });
 const path = require("path");
 const VueLoaderPlugin = require("vue-loader/lib/plugin");

View file

@@ -1,18 +1,27 @@
 FROM ubuntu:20.04
 ARG DOCKER_UID
+ARG DOCKER_DEV_SSH_PASSWORD
 ENV DEBIAN_FRONTEND=noninteractive
 ENV TZ=UTC
+# Set up some core packages
+RUN apt-get update
+RUN apt-get install -y sudo git curl
 # Set up SSH access
-RUN apt-get update && apt-get install -y openssh-server sudo git
+RUN apt-get install -y openssh-server iptables
 RUN mkdir /var/run/sshd
 RUN useradd -rm -d /home/ubuntu -s /bin/bash -g root -G sudo -u "${DOCKER_UID}" ubuntu
-RUN echo 'ubuntu:password' | chpasswd
+RUN echo "ubuntu:${DOCKER_DEV_SSH_PASSWORD}" | chpasswd
-# Install Node.js 16
+# Set up proper permissions for volumes
+RUN mkdir -p /home/ubuntu/zeppelin /home/ubuntu/.vscode-remote /home/ubuntu/.vscode-server /home/ubuntu/.cache/JetBrains
+RUN chown -R ubuntu /home/ubuntu
+# Install Node.js 16 and packages needed to build native packages
 RUN curl -fsSL https://deb.nodesource.com/setup_16.x | bash -
-RUN apt-get install -y nodejs
+RUN apt-get install -y nodejs gcc g++ make python3
 CMD /usr/sbin/sshd -D -e

View file

@@ -1,28 +1,33 @@
 version: '3'
+volumes:
+  mysql-data: {}
+  vscode-remote: {}
+  vscode-server: {}
+  jetbrains-data: {}
 services:
-  # nginx:
-  #   user: "${UID:?Missing UID}:${GID:?Missing GID}"
-  #   build:
-  #     context: ./nginx
-  #     args:
-  #       API_DOMAIN: ${API_DOMAIN:?Missing API_DOMAIN}
-  #       API_PORT: ${API_PORT:?Missing API_PORT}
-  #       DASHBOARD_DOMAIN: ${DASHBOARD_DOMAIN:?Missing DASHBOARD_DOMAIN}
-  #       DASHBOARD_PORT: ${DASHBOARD_PORT:?Missing DASHBOARD_PORT}
-  #   ports:
-  #     - ${PORT:?Missing PORT}:443
-  #   volumes:
-  #     - ./:/zeppelin
-  #
+  nginx:
+    build:
+      context: ./nginx
+      args:
+        API_DOMAIN: ${API_DOMAIN:?Missing API_DOMAIN}
+        API_PORT: ${API_PORT:?Missing API_PORT}
+        DASHBOARD_DOMAIN: ${DASHBOARD_DOMAIN:?Missing DASHBOARD_DOMAIN}
+    ports:
+      - ${DOCKER_WEB_PORT:?Missing DOCKER_WEB_PORT}:443
+    volumes:
+      - ../../:/zeppelin
   mysql:
     image: mysql:8.0
     environment:
-      MYSQL_ROOT_PASSWORD: ${MYSQL_ROOT_PASSWORD?:Missing MYSQL_ROOT_PASSWORD}
+      MYSQL_ROOT_PASSWORD: ${DOCKER_MYSQL_ROOT_PASSWORD?:Missing DOCKER_MYSQL_ROOT_PASSWORD}
       MYSQL_DATABASE: zeppelin
      MYSQL_USER: zeppelin
-      MYSQL_PASSWORD: ${MYSQL_PASSWORD?:Missing MYSQL_PASSWORD}
+      MYSQL_PASSWORD: ${DOCKER_MYSQL_PASSWORD?:Missing DOCKER_MYSQL_PASSWORD}
     ports:
-      - ${MYSQL_PORT:?Missing MYSQL_PORT}:3306
+      - ${DOCKER_MYSQL_PORT:?Missing DOCKER_MYSQL_PORT}:3306
+    volumes:
+      - mysql-data:/var/lib/mysql
 #
 # backend:
 #   image: node:16
@@ -50,7 +55,12 @@ services:
       args:
         DOCKER_UID: ${DOCKER_UID:?Missing DOCKER_UID}
         DOCKER_GID: ${DOCKER_GID:?Missing DOCKER_GID}
+        DOCKER_DEV_SSH_PASSWORD: ${DOCKER_DEV_SSH_PASSWORD:?Missing DOCKER_DEV_SSH_PASSWORD}
     ports:
-      - "${DEVELOPMENT_SSH_PORT:?Missing DEVELOPMENT_SSH_PORT}:22"
+      - "${DOCKER_DEV_SSH_PORT:?Missing DOCKER_DEV_SSH_PORT}:22"
     volumes:
-      - ../../:/zeppelin
+      - ../../:/home/ubuntu/zeppelin
+      - ~/.ssh:/home/ubuntu/.ssh
+      - vscode-remote:/home/ubuntu/.vscode-remote
+      - vscode-server:/home/ubuntu/.vscode-server
+      - jetbrains-data:/home/ubuntu/.cache/JetBrains

View file

@@ -2,11 +2,13 @@ FROM nginx
 ARG API_DOMAIN
 ARG DASHBOARD_DOMAIN
+ARG API_PORT
 RUN apt-get update && apt-get install -y openssl
-RUN openssl req -x509 -newkey rsa:4096 -keyout /etc/ssl/private/api-cert.key -out /etc/ssl/certs/api-cert.pem -days 365 -subj '/CN=*.${API_DOMAIN}' -nodes
+RUN openssl req -x509 -newkey rsa:4096 -keyout /etc/ssl/private/api-cert.key -out /etc/ssl/certs/api-cert.pem -days 3650 -subj '/CN=*.${API_DOMAIN}' -nodes
-RUN openssl req -x509 -newkey rsa:4096 -keyout /etc/ssl/private/dashboard-cert.key -out /etc/ssl/certs/dashboard-cert.pem -days 365 -subj '/CN=*.${DASHBOARD_DOMAIN}' -nodes
+RUN openssl req -x509 -newkey rsa:4096 -keyout /etc/ssl/private/dashboard-cert.key -out /etc/ssl/certs/dashboard-cert.pem -days 3650 -subj '/CN=*.${DASHBOARD_DOMAIN}' -nodes
 COPY ./default.conf /etc/nginx/conf.d/default.conf
-RUN sed -ir "s/_API_DOMAIN_/$(echo ${API_DOMAIN} | sed -ir 's///g')/g"
+RUN sed -ir "s/_API_DOMAIN_/$(echo ${API_DOMAIN} | sed 's/\./\\./g')/g" /etc/nginx/conf.d/default.conf
-RUN sed -ir "s/_DASHBOARD_DOMAIN_/$(echo ${DASHBOARD_DOMAIN} | sed 's/\./\\\\./g')/g"
+RUN sed -ir "s/_DASHBOARD_DOMAIN_/$(echo ${DASHBOARD_DOMAIN} | sed 's/\./\\./g')/g" /etc/nginx/conf.d/default.conf
+RUN sed -ir "s/_API_PORT_/${API_PORT}/g" /etc/nginx/conf.d/default.conf

View file

@@ -4,7 +4,9 @@ server {
   server_name _API_DOMAIN_;
   location / {
-    proxy_pass backend:3000;
+    # Using a variable here stops nginx from crashing if the dev container is restarted or becomes otherwise unavailable
+    set $backend_upstream devenv;
+    proxy_pass http://$backend_upstream:_API_PORT_;
     client_max_body_size 200M;
   }
@@ -23,7 +25,7 @@ server {
 server {
   listen 443 ssl http2;
   listen [::]:443 ssl http2;
-  server_name dashboard.dev.zeppelin.gg;
+  server_name _DASHBOARD_DOMAIN_;
   root /zeppelin/dashboard/dist;