feat: Add database connection handling to server hooks

Keisuke Hirata 2024-08-28 00:13:46 +09:00
parent e7949b6c5f
commit d3cc3145e0
8 changed files with 109 additions and 87 deletions

src/app.d.ts

@@ -1,9 +1,13 @@
 // See https://kit.svelte.dev/docs/types#app
 // for information about these interfaces
+import { Database } from 'pg';
+
 declare global {
 	namespace App {
 		// interface Error {}
-		// interface Locals {}
+		interface Locals {
+			db: any;
+		}
 		// interface PageData {}
 		// interface PageState {}
 		// interface Platform {}
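Note: the commit types Locals.db as any, which compiles but gives load functions no completion on the connection. A stricter declaration is possible; the sketch below is illustrative only and assumes locals.db always holds the pool returned by getConnection().

// Sketch only; not part of this commit.
import type { Pool } from 'pg';

declare global {
	namespace App {
		interface Locals {
			db: Pool;
		}
	}
}

export {};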

src/hooks.server.ts (new file)

@@ -0,0 +1,10 @@
+import type { Handle } from '@sveltejs/kit'
+import { getConnection } from '$lib/server/database/get_connection'
+
+export const handle: Handle = async ({ event, resolve }) => {
+	const pg = getConnection();
+	event.locals.db = pg;
+
+	const result = await resolve(event);
+	return result;
+}
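Note: the hook attaches the shared connection to event.locals before resolving the request, so every server load and action can read it as locals.db. A minimal consumer might look like the following sketch; the route and query are illustrative and not part of this commit.

// Illustrative sketch of a consumer; not part of this commit.
// Assumes locals.db is the pg Pool attached in hooks.server.ts above.
import type { PageServerLoad } from './$types';

export const load: PageServerLoad = async ({ locals }) => {
	// Pool.query checks out a client, runs the query, and releases it.
	const res = await locals.db.query('select now() as now');
	return { now: res.rows[0].now };
};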

$lib/server/database

@@ -1,21 +1,12 @@
 import pg from 'pg';
-const { Pool } = pg;
-import {
-	PG_USER,
-	PG_PASS,
-	PG_HOST,
-	PG_PORT,
-	PG_DB,
-} from '$env/static/private'
-
-const connectionString = `postgres://${PG_USER}:${PG_PASS}@${PG_HOST}:${PG_PORT}/${PG_DB}`;
-const pool = new Pool({ connectionString });
-
-class Postgres {
+import { Pool } from 'pg';
+import { getConnection } from './database/get_connection';
+
+export class Postgres {
 	client: pg.PoolClient | null = null;
 
-	public static async new() {
+	public static async new(pool: Pool) {
 		const pg = new Postgres();
-		pg.client = await pool.connect();
+		pg.client = await getConnection().connect();
 		return pg;
 	}
@@ -40,8 +31,10 @@ class Postgres {
 	}
 }
 
-export default async () => { return await Postgres.new(); }
+export default async (
+	pool: Pool
+) => { return await Postgres.new(pool); };
 
+import { building } from '$app/environment';
 import init from '$lib/server/database/init_db';
-await init();
+if (!building) await init(await Postgres.new(getConnection()));
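Note: the default export is the factory the routes call; it now receives the pool from locals and wraps a freshly checked-out client. A typical call site, mirroring the route changes further down, looks roughly like the sketch below; the query text is illustrative and not part of this commit.

// Illustrative sketch; not part of this commit. The begin/query/commit/rollback/release
// methods are the ones used by the init_db and route hunks below.
import PG from '$lib/server/database';
import type { PageServerLoad } from './$types';

export const load: PageServerLoad = async ({ locals }) => {
	const db = await PG(locals.db);
	await db.begin();
	try {
		const articles = await db.query('select id, title from article where publish = true');
		await db.commit();
		return { articles: articles.rows };
	} catch (err) {
		await db.rollback();
		throw err;
	} finally {
		await db.release();
	}
};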

$lib/server/database/get_connection (new file)

@@ -0,0 +1,15 @@
+import pg from 'pg';
+const { Pool } = pg;
+
+import {
+	PG_USER,
+	PG_PASS,
+	PG_HOST,
+	PG_PORT,
+	PG_DB,
+} from '$env/static/private'
+
+const connectionString = `postgres://${PG_USER}:${PG_PASS}@${PG_HOST}:${PG_PORT}/${PG_DB}`;
+const pool = new Pool({ connectionString });
+
+export const getConnection = () => pool;
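Note: this module is evaluated once per server process, so the Pool is a process-wide singleton and getConnection() hands the same instance to the hook, the init call, and the routes. When a dedicated client is needed for a transaction, it is checked out and returned explicitly; a minimal sketch of that pattern using the standard pg API, not part of this commit:

// Minimal sketch of explicit client checkout; not part of this commit.
import { getConnection } from '$lib/server/database/get_connection';

const client = await getConnection().connect(); // pg.PoolClient from the shared pool
try {
	await client.query('begin');
	// ... queries that must share one transaction ...
	await client.query('commit');
} catch (err) {
	await client.query('rollback');
	throw err;
} finally {
	client.release(); // return the client to the pool
}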

$lib/server/database/init_db

@@ -1,10 +1,9 @@
 // initialize
 import PG from '$lib/server/database';
+import type { Postgres } from '$lib/server/database';
 import fs from 'fs';
 import { compile } from 'mdsvex';
 
-export default async function init() {
+export default async function init(db: Postgres) {
 	// Create tables(when not exists)
 	const schemas = [
 		{
@@ -73,9 +72,9 @@ export default async function init() {
 		}
 	];
 
-	const db = await PG();
+	await db.begin();
 	try {
-		await db.begin();
+		// Create tables
 		for (const schema of schemas) {
 			const res = await db.query(`select * from information_schema.tables where table_name = '${schema.name}'`)
 			if (res.rowCount == 0) {
@@ -87,68 +86,69 @@ export default async function init() {
 			}
 		}
 		await db.commit();
+
+		const articleFiles: ArticleFileItem[] = [];
+		function scanDir(path: string) {
+			const files = fs.readdirSync(path);
+			for (const file of files) {
+				const dir = `${path}/${file}`;
+				const stat = fs.statSync(dir);
+				if (stat.isDirectory()) {
+					scanDir(dir);
+				} else {
+					articleFiles.push({ path: `${path}/${file}`, id: file });
+				}
+			}
+		}
+		scanDir('./articles/article');
+
+		await db.query('update tag set ref_count = 0');
+		db.commit();
+
+		for (const { path, id } of articleFiles) {
+			const res = await db.query('select * from article where id = $1', [id]);
+			const compiled = await compile(fs.readFileSync(path, 'utf-8'));
+			const title = compiled.data.fm.title;
+			const category = path.split('/')[3];
+			const tags: string[] = compiled.data.fm.tags;
+			const released_at = new Date(compiled.data.fm.released_at);
+			const updated_at = new Date(compiled.data.fm.updated_at);
+			const image = compiled.data.fm.image;
+			const publish = compiled.data.fm.publish;
+			const content = compiled.code
+				.replace(/>{@html `<code class="language-/g, '><code class="language-')
+				.replace(/<\/code>`}<\/pre>/g, '</code></pre>');
+			if (res.rowCount == 0) {
+				console.log(`New article: ${id}`);
+				await db.query(
+					'insert into article (id, title, category, released_at, updated_at, tags, image, publish, content) values ($1, $2, $3, $4, $5, $6, $7, $8, $9)',
+					[id, title, category, released_at, updated_at, tags, image, publish, content]
+				);
+			} else if (res.rows[0].updated_at < updated_at) {
+				console.log(`Update article: ${id}`);
+				await db.query(
+					'update article set title = $2, updated_at = $4, tags = $5, content = $6 where id = $1',
+					[id, title, updated_at, tags, content]
+				);
+			} else {
+				console.log(`Article ${id} is already up-to-date`);
+			}
+			for (const tag of tags) {
+				if ((await db.query('select * from tag where name = $1', [tag])).rowCount == 0) {
+					db.query('insert into tag (name, ref_count) values ($1, 1)', [tag]);
+				} else {
+					db.query('update tag set ref_count = ref_count + 1 where name = $1', [tag]);
+				}
+			}
+		}
+		await db.commit();
 	} catch (err) {
 		console.error(err);
 		await db.rollback();
-	} finally {
-		await db.release();
 	}
-	const articleFiles: ArticleFileItem[] = [];
-	function scanDir(path: string) {
-		const files = fs.readdirSync(path);
-		for (const file of files) {
-			const dir = `${path}/${file}`;
-			const stat = fs.statSync(dir);
-			if (stat.isDirectory()) {
-				scanDir(dir);
-			} else {
-				articleFiles.push({ path: `${path}/${file}`, id: file });
-			}
-		}
-	}
-	scanDir('./articles/article');
-
-	await db.query('update tag set ref_count = 0');
-	db.commit();
-
-	for (const { path, id } of articleFiles) {
-		const res = await db.query('select * from article where id = $1', [id]);
-		const compiled = await compile(fs.readFileSync(path, 'utf-8'));
-		const title = compiled.data.fm.title;
-		const category = path.split('/')[3];
-		const tags: string[] = compiled.data.fm.tags;
-		const released_at = new Date(compiled.data.fm.released_at);
-		const updated_at = new Date(compiled.data.fm.updated_at);
-		const image = compiled.data.fm.image;
-		const publish = compiled.data.fm.publish;
-		const content = compiled.code
-			.replace(/>{@html `<code class="language-/g, '><code class="language-')
-			.replace(/<\/code>`}<\/pre>/g, '</code></pre>');
-		if (res.rowCount == 0) {
-			console.log(`New article: ${id}`);
-			await db.query(
-				'insert into article (id, title, category, released_at, updated_at, tags, image, publish, content) values ($1, $2, $3, $4, $5, $6, $7, $8, $9)',
-				[id, title, category, released_at, updated_at, tags, image, publish, content]
-			);
-		} else if (res.rows[0].updated_at < updated_at) {
-			console.log(`Update article: ${id}`);
-			await db.query(
-				'update article set title = $2, updated_at = $4, tags = $5, content = $6 where id = $1',
-				[id, title, updated_at, tags, content]
-			);
-		} else {
-			console.log(`Article ${id} is already up-to-date`);
-		}
-		for (const tag of tags) {
-			if ((await db.query('select * from tag where name = $1', [tag])).rowCount == 0) {
-				db.query('insert into tag (name, ref_count) values ($1, 1)', [tag]);
-			} else {
-				db.query('update tag set ref_count = ref_count + 1 where name = $1', [tag]);
-			}
-		}
-	}
-	await db.commit();
-	await db.release();
 }
 
 type ArticleFileItem = {

+page.server.ts

@@ -18,8 +18,8 @@ let data: {
 	updated: "",
 };
 
-export const load: PageServerLoad = async ({ params }) => {
-	const db = await PG();
+export const load: PageServerLoad = async ({ params, locals }) => {
+	const db = await PG(locals.db);
 	await db.begin();
 	try {
 		const recent_articles = await db.query(

+page.server.ts

@@ -3,11 +3,11 @@ import type { PageServerLoad } from './$types';
 import PG from '$lib/server/database';
 import { error } from '@sveltejs/kit';
 
-export const load: PageServerLoad = async ({ params }) => {
+export const load: PageServerLoad = async ({ params, locals }) => {
 	const { category, id } = params;
 	console.log(id);
-	const db = await PG();
+	const db = await PG(locals.db);
 	await db.begin();
 	try {
 		const article = await db.query(

+page.server.ts

@@ -3,11 +3,11 @@ import type { PageServerLoad } from './$types';
 import PG from '$lib/server/database';
 import { error } from '@sveltejs/kit';
 
-export const load: PageServerLoad = async ({ params }) => {
+export const load: PageServerLoad = async ({ params, locals }) => {
 	const { category, id, series } = params;
 	console.log(id);
-	const db = await PG();
+	const db = await PG(locals.db);
 	await db.begin();
 	try {
 		const article = await db.query(