It is now possible to display #1

Merged
Hare merged 24 commits from develop into master 2024-08-26 18:55:10 +09:00
4 changed files with 111 additions and 110 deletions
Showing only changes of commit c4a37dcdc5 - Show all commits

View File

@ -1,6 +1,5 @@
import pg from 'pg'; import pg from 'pg';
const { Pool } = pg; const { Pool } = pg;
import * as tc from './database/tablectl';
import { import {
PG_USER, PG_USER,
@ -11,72 +10,38 @@ import {
} from '$env/static/private' } from '$env/static/private'
const connectionString = `postgres://${PG_USER}:${PG_PASS}@${PG_HOST}:${PG_PORT}/${PG_DB}`; const connectionString = `postgres://${PG_USER}:${PG_PASS}@${PG_HOST}:${PG_PORT}/${PG_DB}`;
export const pool = new Pool({ const pool = new Pool({ connectionString });
connectionString: connectionString, class Postgres {
max: 2, client: pg.PoolClient | null = null;
}); public static async new() {
const pg = new Postgres();
// initialize pg.client = await pool.connect();
// import fs from 'fs'; return pg;
// type ArticleFileItem = {
// path: string,
// id: string,
// }
const schemas: tc.TableSchema[] = [
{
name: 'article',
columns: [
{
name: 'id',
type: 'serial',
constraint: 'primary key',
},
{
name: 'title',
type: 'text',
constraint: 'not null',
},
{
name: 'content',
type: 'text',
constraint: 'not null',
},
],
},
{
name: 'fuga',
columns: [
{
name: 'id',
type: 'serial',
constraint: 'primary key',
},
{
name: 'name',
type: 'text',
constraint: 'not null',
},
{
name: 'email',
type: 'text',
constraint: 'not null',
},
{
name: 'password',
type: 'text',
constraint: 'not null',
},
],
},
];
(async () => {
for (const schema of schemas) {
const exists = await tc.is_table_exists(pool, schema.name);
if (!exists) {
console.log(`Creating table ${schema.name}`);
await tc.create_table(pool, schema);
} else {
console.log(`Table ${schema.name} already exists`);
}
} }
})();
async query(query: string, params: any[] = []) {
return (await this.client!.query(query, params));
}
async release() {
await this.client!.release(true);
}
async begin() {
await this.client!.query('begin');
}
async commit() {
await this.client!.query('commit');
}
async rollback() {
await this.client!.query('rollback');
}
}
export default async () => { return await Postgres.new(); }
import init from '$lib/server/database/init_db';
await init();

View File

@ -0,0 +1,72 @@
// initialize
import PG from '$lib/server/database';
import fs from 'fs';
/**
 * One-time startup initialization: creates any missing tables, then scans the
 * on-disk article directory. Obtains a pooled client via PG() and always
 * releases it, even when the scan throws.
 */
export default async function init() {
  // Tables that must exist before the app serves requests.
  const schemas = [
    {
      name: 'article',
      columns: [
        { name: 'id', type: 'serial', constraint: 'primary key' },
        { name: 'title', type: 'text', constraint: 'not null' },
        { name: 'released_at', type: 'timestamp', constraint: 'not null' },
        { name: 'updated_at', type: 'timestamp', constraint: 'not null' },
        { name: 'tags', type: 'text[]', constraint: 'not null' },
        { name: 'content', type: 'text', constraint: 'not null' },
      ],
    },
  ];
  const db = await PG();
  try {
    try {
      await db.begin();
      for (const schema of schemas) {
        // Bound parameter instead of string interpolation into the SQL text.
        const res = await db.query(
          'select 1 from information_schema.tables where table_name = $1',
          [schema.name],
        );
        // BUG FIX: for a SELECT, rowCount is the number of matching rows
        // (0 when the table is absent), not null — the previous `=== null`
        // comparison never fired, so tables were never created.
        if ((res.rowCount ?? 0) === 0) {
          console.log(`Creating table ${schema.name}`);
          // Identifiers cannot be bound as parameters; they come from the
          // static `schemas` literal above, not from external input.
          const columnStr = schema.columns
            .map((c) => `${c.name} ${c.type} ${c.constraint}`)
            .join(', ');
          await db.query(`create table ${schema.name} (${columnStr})`);
        } else {
          console.log(`Table ${schema.name} already exists`);
        }
      }
      await db.commit();
    } catch (err) {
      // Best-effort: log and roll back, then continue with the scan.
      console.error(err);
      await db.rollback();
    }
    // Check whether the articles are already in the database.
    // NOTE(review): collection into articleFiles is still commented out — the
    // scan currently only logs paths; presumably WIP.
    const articleFiles: ArticleFileItem[] = [];
    function scanDir(path: string) {
      const entries = fs.readdirSync(path);
      for (const entry of entries) {
        const full = `${path}/${entry}`;
        if (fs.statSync(full).isDirectory()) {
          scanDir(full);
        } else {
          // articleFiles.push({ path: full, id: entry });
          console.log(full);
        }
      }
    }
    scanDir('./articles/dist');
  } finally {
    // Always return the client to the pool, even if the scan throws.
    await db.release();
  }
}
// An article file found on disk; `id` appears to be the bare filename
// (see the commented-out push in init()) — confirm before relying on it.
type ArticleFileItem = {
  path: string,
  id: string,
}
// Declarative description of one table: `type` and `constraint` are raw SQL
// fragments (e.g. 'serial', 'primary key') concatenated into a CREATE TABLE.
export type TableSchema = {
  name: string,
  columns: {
    name: string,
    type: string,
    constraint: string,
  }[],
}

View File

@ -1,30 +0,0 @@
import pg from 'pg';
// Declarative description of one table, consumed by create_table():
// `type` and `constraint` are raw SQL fragments (e.g. 'serial', 'not null')
// joined verbatim into the CREATE TABLE statement.
export type TableSchema = {
  name: string,
  columns: {
    name: string,
    type: string,
    constraint: string,
  }[],
}
/**
 * Returns true when a table named `name` exists in the connected database.
 * SECURITY FIX: `name` is now passed as a bound parameter ($1) instead of
 * being interpolated into the SQL string, preventing SQL injection.
 */
export const is_table_exists = async (pool: pg.Pool, name: string) => {
  const query = `select * from information_schema.tables where table_name = $1`;
  const res = await pool.query(query, [name]);
  return res.rows.length > 0;
}
/**
 * Issues a CREATE TABLE statement built from `schema`. On failure the
 * offending SQL and the driver error are logged and the error is swallowed
 * (callers are not notified of the failure).
 */
export const create_table = async (pool: pg.Pool, schema: TableSchema) => {
  // Each column renders as "<name> <type> <constraint>".
  const columnDefs = schema.columns.map(
    (col) => `${col.name} ${col.type} ${col.constraint}`,
  );
  const query = `create table ${schema.name} (${columnDefs.join(', ')})`;
  try {
    await pool.query(query);
  } catch (err) {
    console.log(query);
    console.error(err);
  }
}

View File

@ -1,8 +1,7 @@
import { error } from '@sveltejs/kit'; import { error } from '@sveltejs/kit';
import type { PageServerLoad } from './$types'; import type { PageServerLoad } from './$types';
// import index from '$lib/index';
import type { Content } from '$lib/article'; import type { Content } from '$lib/article';
import { pool } from '$lib/server/database'; import PG from '$lib/server/database';
let data: { let data: {
recent: Content[], recent: Content[],
@ -23,16 +22,11 @@ let data: {
}; };
export const load: PageServerLoad = async ({ params }) => { export const load: PageServerLoad = async ({ params }) => {
const connect = await pool.connect(); const db = await PG();
const result = await connect.query('SELECT NOW()') const now = await db.query('SELECT NOW()');
.then((res) => { await db.release();
data.updated = res.rows[0].now.toISOString();
})
.catch((err) => {
error(err);
});
connect.release();
data.updated = now.rows[0].now;
return data; return data;
} }