Implemented debug flag to turn off debug logging
jordan-dalby committed Nov 19, 2024
1 parent 5e2ccc5 commit c112f71
Showing 14 changed files with 133 additions and 87 deletions.
15 changes: 15 additions & 0 deletions docker-compose-dev.yaml
@@ -0,0 +1,15 @@
services:
  server:
    build:
      context: .
      dockerfile: Dockerfile
    ports:
      - "5001:5000"
    environment:
      - BASE_PATH=
      - JWT_SECRET=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6IkpvaG4gRG9lIiwiaWF0IjoxNTE2MjM5MDIyfQ.nhan23TF0qyO4l4rDMkJ8ebNLMgV62NGfBozt9huymA
      - TOKEN_EXPIRY=24h
      - ALLOW_NEW_ACCOUNTS=true
      - DEBUG=true
    volumes:
      - ./data:/data/snippets
4 changes: 3 additions & 1 deletion docker-compose.yaml
@@ -9,10 +9,12 @@ services:
      # e.g. write /bytestash for a domain such as my.domain/bytestash, leave blank in every other case
      - BASE_PATH=
      # the jwt secret used by the server, make sure to generate your own secret token to replace this one
      - JWT_SECRET=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6IkpvaG4gRG9lIiwiaWF0IjoxNTE2MjM5MDIyfQ.nhan23TF0qyO4l4rDMkJ8ebNLMgV62NGfBozt9huymA
      - JWT_SECRET=your-jwt-token
      # how long the token lasts, examples: "2 days", "10h", "7d", "1m", "60s"
      - TOKEN_EXPIRY=24h
      # is this bytestash instance open to new accounts being created?
      - ALLOW_NEW_ACCOUNTS=true
      # Should debug mode be enabled? Essentially enables logging, in most cases leave this as false
      - DEBUG=false
    volumes:
      - ./data:/data/snippets
5 changes: 3 additions & 2 deletions server/src/app.js
@@ -6,6 +6,7 @@ const shareRoutes = require('./routes/shareRoutes')
const { authenticateToken } = require('./middleware/auth');
const { join } = require('path');
const fs = require('fs');
const Logger = require('./logger');

const app = express();
const PORT = 5000;
@@ -71,7 +72,7 @@ app.get(`${basePath}/*`, (req, res, next) => {
});

function handleShutdown() {
console.log('Received shutdown signal, starting graceful shutdown...');
Logger.info('Received shutdown signal, starting graceful shutdown...');

shutdownDatabase();

@@ -86,7 +87,7 @@ process.on('SIGINT', handleShutdown);

return new Promise((resolve) => {
app.listen(PORT, () => {
console.log(`Server running on port ${PORT}`);
Logger.info(`Server running on port ${PORT}`);
resolve();
});
});
49 changes: 20 additions & 29 deletions server/src/config/database.js
@@ -3,41 +3,32 @@ const path = require('path');
const fs = require('fs');
const { up_v1_4_0 } = require('./migrations/20241111-migration');
const { up_v1_5_0 } = require('./migrations/20241117-migration');
const Logger = require('../logger');

let db = null;
let checkpointInterval = null;

function getDatabasePath() {
if (process.env.NODE_ENV === 'production') {
const userDataPath = './data/snippets';

if (!fs.existsSync(userDataPath)) {
fs.mkdirSync(userDataPath, { recursive: true });
}

return path.join(userDataPath, 'snippets.db');
} else {
const devPath = path.join(__dirname, '../../../data/snippets');
if (!fs.existsSync(devPath)) {
fs.mkdirSync(devPath, { recursive: true });
}
return path.join(devPath, 'snippets.db');
const dbPath = path.join(__dirname, '../../../data/snippets');
if (!fs.existsSync(dbPath)) {
fs.mkdirSync(dbPath, { recursive: true });
}
return path.join(dbPath, 'snippets.db');
}

function checkpointDatabase() {
if (!db) return;

try {
console.log('Starting database checkpoint...');
Logger.debug('Starting database checkpoint...');
const start = Date.now();

db.pragma('wal_checkpoint(PASSIVE)');

const duration = Date.now() - start;
console.log(`Database checkpoint completed in ${duration}ms`);
Logger.debug(`Database checkpoint completed in ${duration}ms`);
} catch (error) {
console.error('Error during database checkpoint:', error);
Logger.error('Error during database checkpoint:', error);
}
}

@@ -66,14 +57,14 @@ function backupDatabase(dbPath) {
if (fs.existsSync(dbPath)) {
const dbBackupPath = `${baseBackupPath}.db`;
fs.copyFileSync(dbPath, dbBackupPath);
console.log(`Database backed up to: ${dbBackupPath}`);
Logger.debug(`Database backed up to: ${dbBackupPath}`);
} else {
console.error(`Database file not found: ${dbPath}`);
Logger.error(`Database file not found: ${dbPath}`);
return false;
}
return true;
} catch (error) {
console.error('Failed to create database backup:', error);
Logger.error('Failed to create database backup:', error);
throw error;
}
}
@@ -132,12 +123,12 @@ function createInitialSchema(db) {
function initializeDatabase() {
try {
const dbPath = getDatabasePath();
console.log(`Initializing SQLite database at: ${dbPath}`);
Logger.debug(`Initializing SQLite database at: ${dbPath}`);

const dbExists = fs.existsSync(dbPath);

db = new Database(dbPath, {
verbose: console.log,
verbose: Logger.debug,
fileMustExist: false
});

@@ -147,21 +138,21 @@ function initializeDatabase() {
backupDatabase(dbPath);

if (!dbExists) {
console.log('Creating new database with initial schema...');
Logger.debug('Creating new database with initial schema...');
createInitialSchema(db);
} else {
console.log('Database file exists, checking for needed migrations...');
Logger.debug('Database file exists, checking for needed migrations...');

up_v1_4_0(db);
up_v1_5_0(db);
}

startCheckpointInterval();

console.log('Database initialization completed successfully');
Logger.debug('Database initialization completed successfully');
return db;
} catch (error) {
console.error('Database initialization error:', error);
Logger.error('Database initialization error:', error);
throw error;
}
}
@@ -176,16 +167,16 @@ function getDb() {
function shutdownDatabase() {
if (db) {
try {
console.log('Performing final database checkpoint...');
Logger.debug('Performing final database checkpoint...');
db.pragma('wal_checkpoint(TRUNCATE)');

stopCheckpointInterval();
db.close();
db = null;

console.log('Database shutdown completed successfully');
Logger.debug('Database shutdown completed successfully');
} catch (error) {
console.error('Error during database shutdown:', error);
Logger.error('Error during database shutdown:', error);
throw error;
}
}
10 changes: 6 additions & 4 deletions server/src/config/migrations/20241111-migration.js
@@ -1,3 +1,5 @@
const Logger = require("../../logger");

function needsMigration(db) {
const hasCodeColumn = db.prepare(`
SELECT COUNT(*) as count
@@ -10,11 +12,11 @@ function needsMigration(db) {

async function up_v1_4_0(db) {
if (!needsMigration(db)) {
console.log('v1.4.0 - Migration not necessary');
Logger.debug('v1.4.0 - Migration not necessary');
return;
}

console.log('v1.4.0 - Starting migration to fragments...');
Logger.debug('v1.4.0 - Starting migration to fragments...');

db.pragma('foreign_keys = OFF');

@@ -70,9 +72,9 @@ async function up_v1_4_0(db) {
`);
})();

console.log('Migration completed successfully');
Logger.debug('Migration completed successfully');
} catch (error) {
console.error('Migration failed:', error);
Logger.error('Migration failed:', error);
throw error;
} finally {
db.pragma('foreign_keys = ON');
26 changes: 14 additions & 12 deletions server/src/config/migrations/20241117-migration.js
@@ -1,3 +1,5 @@
const Logger = require("../../logger");

function needsMigration(db) {
try {
const hasUsersTable = db.prepare(`
@@ -7,7 +9,7 @@ function needsMigration(db) {
`).get();

if (!hasUsersTable) {
console.log('Users table does not exist, migration needed');
Logger.debug('Users table does not exist, migration needed');
return true;
}

@@ -18,7 +20,7 @@ function needsMigration(db) {
`).get();

if (hasUserIdColumn.count === 0) {
console.log('Snippets table missing user_id column, migration needed');
Logger.debug('Snippets table missing user_id column, migration needed');
return true;
}

@@ -29,25 +31,25 @@ function needsMigration(db) {
`).get();

if (hasUserIdIndex.count === 0) {
console.log('Missing user_id index, migration needed');
Logger.debug('Missing user_id index, migration needed');
return true;
}

console.log('Database schema is up to date, no migration needed');
Logger.debug('Database schema is up to date, no migration needed');
return false;
} catch (error) {
console.error('Error checking migration status:', error);
Logger.error('Error checking migration status:', error);
throw error;
}
}

async function up_v1_5_0(db) {
if (!needsMigration(db)) {
console.log('v1.5.0 - Migration is not needed, database is up to date');
Logger.debug('v1.5.0 - Migration is not needed, database is up to date');
return;
}

console.log('v1.5.0 - Starting migration: Adding users table and updating snippets...');
Logger.debug('v1.5.0 - Starting migration: Adding users table and updating snippets...');

try {
db.exec(`
@@ -66,27 +68,27 @@ async function up_v1_5_0(db) {
CREATE INDEX idx_snippets_user_id ON snippets(user_id);
`);

console.log('Migration completed successfully');
Logger.debug('Migration completed successfully');
} catch (error) {
console.error('Migration failed:', error);
Logger.error('Migration failed:', error);
throw error;
}
}

async function up_v1_5_0_snippets(db, userId) {
try {
console.log(`Migrating orphaned snippets to user ${userId}...`);
Logger.debug(`Migrating orphaned snippets to user ${userId}...`);

const updateSnippets = db.prepare(`
UPDATE snippets SET user_id = ? WHERE user_id IS NULL
`);

const result = updateSnippets.run(userId);
console.log(`Successfully migrated ${result.changes} snippets to user ${userId}`);
Logger.debug(`Successfully migrated ${result.changes} snippets to user ${userId}`);

return result.changes;
} catch (error) {
console.error('Snippet migration failed:', error);
Logger.error('Snippet migration failed:', error);
throw error;
}
}
26 changes: 26 additions & 0 deletions server/src/logger.js
@@ -0,0 +1,26 @@
const DEBUG = process.env.DEBUG === 'true';

class Logger {
static debug(...args) {
if (DEBUG) {
console.log('[DEBUG]', ...args);
}
}

static error(...args) {
if (DEBUG) {
console.error('[ERROR]', ...args);
} else {
const messages = args.map(arg =>
arg instanceof Error ? arg.message : arg
);
console.error('[ERROR]', ...messages);
}
}

static info(...args) {
console.log('[INFO]', ...args);
}
}

module.exports = Logger;
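
For context, here is a minimal usage sketch of the new logger (illustrative only, not part of the commit): Logger.debug is silent unless the DEBUG environment variable is "true", Logger.info always prints, and Logger.error always prints but reduces Error objects to their messages unless DEBUG is enabled.

const Logger = require('./logger');

Logger.debug('Database checkpoint completed in 12ms');     // printed only when DEBUG=true
Logger.info('Server running on port 5000');                // always printed, prefixed with [INFO]
Logger.error('Migration failed:', new Error('disk full')); // always printed; only the message 'disk full' appears unless DEBUG=true
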
9 changes: 5 additions & 4 deletions server/src/repositories/shareRepository.js
@@ -1,5 +1,6 @@
const { getDb } = require('../config/database');
const crypto = require('crypto');
const Logger = require('../logger');

class ShareRepository {
constructor() {
@@ -126,7 +127,7 @@ class ShareRepository {
expiresIn
};
} catch (error) {
console.error('Error in createShare:', error);
Logger.error('Error in createShare:', error);
throw error;
}
}
@@ -137,7 +138,7 @@ class ShareRepository {
const share = this.getShareStmt.get(id);
return this.#processShare(share);
} catch (error) {
console.error('Error in getShare:', error);
Logger.error('Error in getShare:', error);
throw error;
}
}
@@ -151,7 +152,7 @@ class ShareRepository {
}
return this.getSharesBySnippetIdStmt.all(snippetIdInt, userId);
} catch (error) {
console.error('Error in getSharesBySnippetId:', error);
Logger.error('Error in getSharesBySnippetId:', error);
throw error;
}
}
@@ -161,7 +162,7 @@ class ShareRepository {
try {
return this.deleteShareStmt.run(id, userId);
} catch (error) {
console.error('Error in deleteShare:', error);
Logger.error('Error in deleteShare:', error);
throw error;
}
}