// initialize: create missing tables and sync markdown articles into the database
import type { Postgres } from '$lib/server/database';
import fs from 'fs';
import { compile } from 'mdsvex';

export default async function init(db: Postgres) {
	// Table definitions, created below if they do not already exist
	const schemas: TableSchema[] = [
		{
			name: 'article',
			columns: [
				{ name: 'seq', type: 'serial', constraint: 'primary key' },
				{ name: 'id', type: 'text', constraint: 'not null' },
				{ name: 'title', type: 'text', constraint: 'not null' },
				{ name: 'category', type: 'text', constraint: 'not null' },
				{ name: 'released_at', type: 'timestamp', constraint: 'not null' },
				{ name: 'updated_at', type: 'timestamp', constraint: 'not null' },
				{ name: 'tags', type: 'text[]', constraint: 'not null' },
				{ name: 'image', type: 'text', constraint: '' },
				{ name: 'publish', type: 'text', constraint: 'not null' },
				{ name: 'content', type: 'text', constraint: 'not null' },
			],
		},
		{
			name: 'article_comment',
			columns: [
				{ name: 'id', type: 'serial', constraint: 'primary key' },
				{ name: 'article', type: 'integer', constraint: 'not null' },
				{ name: 'posted_at', type: 'timestamp', constraint: 'not null' },
				{ name: 'content', type: 'text', constraint: 'not null' },
			],
		},
		{
			name: 'thread',
			columns: [
				{ name: 'seq', type: 'serial', constraint: 'primary key' },
				{ name: 'id', type: 'text', constraint: 'not null' },
				{ name: 'title', type: 'text', constraint: 'not null' },
				{ name: 'category', type: 'text', constraint: 'not null' },
				{ name: 'created_at', type: 'timestamp', constraint: 'not null' },
				{ name: 'updated_at', type: 'timestamp', constraint: 'not null' },
				{ name: 'tags', type: 'text[]', constraint: 'not null' },
				{ name: 'content', type: 'text', constraint: 'not null' },
			],
		},
		{
			name: 'thread_post',
			columns: [
				{ name: 'seq', type: 'serial', constraint: 'primary key' },
				{ name: 'thread_id', type: 'integer', constraint: 'not null' },
				{ name: 'title', type: 'text', constraint: 'not null' },
				{ name: 'posted_at', type: 'timestamp', constraint: 'not null' },
				{ name: 'content', type: 'text', constraint: 'not null' },
			],
		},
		{
			name: 'thread_comment',
			columns: [
				{ name: 'id', type: 'serial', constraint: 'primary key' },
				{ name: 'thread', type: 'integer', constraint: 'not null' },
				{ name: 'posted_at', type: 'timestamp', constraint: 'not null' },
				{ name: 'content', type: 'text', constraint: 'not null' },
			],
		},
		{
			name: 'tag',
			columns: [
				{ name: 'seq', type: 'serial', constraint: 'primary key' },
				{ name: 'name', type: 'text', constraint: 'not null' },
				{ name: 'ref_count', type: 'integer', constraint: 'not null' },
			],
		},
	];

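	// For reference, the create-table loop inside the transaction below turns each schema
	// entry into a single statement; the first one, for example, becomes roughly:
	//   create table article (seq serial primary key, id text not null, title text not null,
	//     category text not null, released_at timestamp not null, updated_at timestamp not null,
	//     tags text[] not null, image text, publish text not null, content text not null)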
	await db.begin();
	try {
		// Create any tables that do not exist yet
		for (const schema of schemas) {
			const res = await db.query('select * from information_schema.tables where table_name = $1', [schema.name]);
			if (res.rowCount === 0) {
				console.log(`Creating table ${schema.name}`);
				const columnStr = schema.columns.map(c => `${c.name} ${c.type} ${c.constraint}`).join(', ');
				await db.query(`create table ${schema.name} (${columnStr})`);
			} else {
				console.log(`Table ${schema.name} already exists`);
			}
		}
		await db.commit();

		// Collect every article file under ./articles/article, recursing into subdirectories
		const articleFiles: ArticleFileItem[] = [];
		function scanDir(path: string) {
			const files = fs.readdirSync(path);
			for (const file of files) {
				const dir = `${path}/${file}`;
				const stat = fs.statSync(dir);
				if (stat.isDirectory()) {
					scanDir(dir);
				} else {
					articleFiles.push({ path: `${path}/${file}`, id: file });
				}
			}
		}
		scanDir('./articles/article');

		// Recount tag references from scratch on every run
		await db.query('update tag set ref_count = 0');
		await db.commit();

		for (const { path, id } of articleFiles) {
			const res = await db.query('select * from article where id = $1', [id]);
			const compiled = await compile(fs.readFileSync(path, 'utf-8'));

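			// The frontmatter of each markdown file is assumed to look roughly like this;
			// the shape is inferred from the fields read below, and the values are only illustrative:
			//   ---
			//   title: Some post title
			//   released_at: 2024-01-01
			//   updated_at: 2024-01-02
			//   tags: [svelte, postgres]
			//   image: /images/cover.png
			//   publish: ...   # stored as text; the allowed values are not visible in this file
			//   ---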
			const title = compiled.data.fm.title;
			// Segment 3 of the path (the directory under ./articles/article) is used as the category
			const category = path.split('/')[3];
			const tags: string[] = compiled.data.fm.tags;
			const released_at = new Date(compiled.data.fm.released_at);
			const updated_at = new Date(compiled.data.fm.updated_at);
			const image = compiled.data.fm.image;
			const publish = compiled.data.fm.publish;
			// Unwrap the {@html `...`} wrapper that mdsvex puts around highlighted code blocks,
			// so plain HTML is stored in the database
			const content = compiled.code
				.replace(/>{@html `<code class="language-/g, '><code class="language-')
				.replace(/<\/code>`}<\/pre>/g, '</code></pre>');
			if (res.rowCount === 0) {
				console.log(`New article: ${id}`);
				await db.query(
					'insert into article (id, title, category, released_at, updated_at, tags, image, publish, content) values ($1, $2, $3, $4, $5, $6, $7, $8, $9)',
					[id, title, category, released_at, updated_at, tags, image, publish, content]
				);
			} else if (res.rows[0].updated_at < updated_at) {
				console.log(`Update article: ${id}`);
				await db.query(
					'update article set title = $2, category = $3, released_at = $4, updated_at = $5, tags = $6, image = $7, publish = $8, content = $9 where id = $1',
					[id, title, category, released_at, updated_at, tags, image, publish, content]
				);
			} else {
				console.log(`Article ${id} is already up-to-date`);
			}

			// Register this article's tags, creating missing ones and bumping ref_count
			for (const tag of tags) {
				if ((await db.query('select * from tag where name = $1', [tag])).rowCount === 0) {
					await db.query('insert into tag (name, ref_count) values ($1, 1)', [tag]);
				} else {
					await db.query('update tag set ref_count = ref_count + 1 where name = $1', [tag]);
				}
			}
		}
		await db.commit();

	} catch (err) {
		console.error(err);
		await db.rollback();
	} finally {
		await db.release();
	}
}

type ArticleFileItem = {
	path: string,
	id: string,
}

export type TableSchema = {
	name: string,
	columns: {
		name: string,
		type: string,
		constraint: string,
	}[],
}
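
// Usage sketch (not part of the original file): the default export is presumably run once
// at server startup with an already-connected Postgres wrapper. The module path below and
// the `db` instance exported from '$lib/server/database' are assumptions about the
// surrounding project, not something this file confirms.
//
//   // e.g. in src/hooks.server.ts (hypothetical)
//   import { db } from '$lib/server/database';
//   import init from '$lib/server/init';
//
//   await init(db); // creates missing tables and syncs ./articles/article into the database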