diff --git a/src/lib/server/database/init.ts b/src/lib/server/database/init.ts
index f57c908..be20422 100644
--- a/src/lib/server/database/init.ts
+++ b/src/lib/server/database/init.ts
@@ -1,30 +1,25 @@
// initialize
import type { Postgres } from '$lib/server/database';
import fs from 'fs';
-import { execSync } from 'child_process';
import compile from '$lib/server/article';
+import article_git from '$lib/server/aritcle-git';
+
+import { format } from '$lib/scripts/formatted_date';
export default async function init(db: Postgres) {
- if (fs.existsSync('./articles/')) {
- console.log('Pulling articles from git..');
- const stdout = execSync('git -c core.sshCommand="ssh -i ../key -F /dev/null" pull', { cwd: './articles/' });
- console.log(stdout.toString());
- } else {
- console.log('Cloning articles from git..');
- const stdout = execSync('git -c core.sshCommand="ssh -i ./key -F /dev/null" clone git@gitea.hareworks.net:Hare/blog-articles.git articles', { cwd: './' });
- console.log(stdout.toString());
- }
-
- const schemas = [
+ await cloneRepo();
+ await createTable(db, [
{
name: 'article',
columns: [
{ name: 'seq', type: 'serial', constraint: 'primary key' },
{ name: 'id', type: 'text', constraint: 'not null' },
- { name: 'title', type: 'text', constraint: 'not null' },
- { name: 'category', type: 'text', constraint: 'not null' },
{ name: 'released_at', type: 'timestamp', constraint: 'not null' },
{ name: 'updated_at', type: 'timestamp', constraint: 'not null' },
+ { name: 'author', type: 'text', constraint: 'not null' },
+ { name: 'email', type: 'text', constraint: 'not null' },
+ { name: 'title', type: 'text', constraint: 'not null' },
+ { name: 'category', type: 'text', constraint: 'not null' },
{ name: 'tags', type: 'text[]', constraint: 'not null' },
{ name: 'image', type: 'text', constraint: '' },
{ name: 'publish', type: 'text', constraint: 'not null' },
@@ -80,73 +75,49 @@ export default async function init(db: Postgres) {
{ name: 'ref_count', type: 'integer', constraint: 'not null' },
],
}
- ];
-
- await db.begin();
- try {
- // Create tables
- for (const schema of schemas) {
- const res = await db.query(`select * from information_schema.tables where table_name = '${schema.name}'`)
- if (res.rowCount == 0) {
- console.log(`Creating table ${schema.name}`);
- const columnStr = schema.columns.map(c => `${c.name} ${c.type} ${c.constraint}`).join(', ');
- await db.query(`create table ${schema.name} (${columnStr})`);
- } else {
- console.log(`Table ${schema.name} already exists`);
- }
- }
- } catch (err) {
- console.error(err);
- await db.rollback();
- } finally {
- await db.commit();
- }
-
- const articleFiles: ArticleFileItem[] = [];
- function scanDir(path: string) {
- const files = fs.readdirSync(path);
- for (const file of files) {
- const dir = `${path}/${file}`;
- const stat = fs.statSync(dir);
- if (stat.isDirectory()) {
- scanDir(dir);
- } else {
- articleFiles.push({ path: `${path}/${file}`, id: file.replace('.md', '') });
- }
- }
- }
- scanDir('./articles/article');
+ ]);
+ const articleFiles = await crawlArticles(db);
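+	// Reset tag reference counts before every article is (re)processed below.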
await db.query('update tag set ref_count = 0');
for (const { path, id } of articleFiles) {
+ console.log(`Processing ${id}...`);
await db.begin();
try {
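+			// Derive author, email and the release/update timestamps from the article's git history instead of front matter.
+			// path.slice(11) strips the leading './articles/' (11 chars) so the log path is relative to the articles checkout.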
+ const gitlog = await article_git.log({
+ file: path.slice(11),
+ strictDate: true,
+ });
const res = await db.query('select * from article where id = $1', [id]);
- const compiled = await compile(fs.readFileSync(path, 'utf-8'));
- const title = compiled.data.fm.title;
+ const author = gitlog.all[0].author_name;
+ const email = gitlog.all[0].author_email;
+ const released_at = new Date(gitlog.all[0].date);
+ const updated_at = (gitlog.latest === null) ? released_at : new Date(gitlog.latest.date);
+ console.log(`Author: ${author} <${email}>\nReleased at: ${format(released_at)}\nUpdated at: ${format(updated_at)}`);
+
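+			// Category still comes from the directory path; title, tags, image and publish flag come from the compiled front matter.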
const category = path.split('/')[3];
+ const compiled = await compile(fs.readFileSync(path, 'utf-8'));
+ const title = compiled.data.fm.title;
const tags: string[] = compiled.data.fm.tags;
- const released_at = new Date(compiled.data.fm.released_at);
- const updated_at = (compiled.data.fm.updated_at !== null) ? new Date(compiled.data.fm.updated_at) : released_at;
const image = compiled.data.fm.image;
const publish = compiled.data.fm.publish;
const content = compiled.code
 				.replace(/>{@html `/g, '>')
 				.replace(/`}<\/pre>/g, '</pre>')
+
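+			// Insert articles that are new to the database; update existing rows only when the git history reports a newer updated_at.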
if (res.rowCount == 0) {
console.log(`New article: ${id}`);
await db.query(
- 'insert into article (id, title, category, released_at, updated_at, tags, image, publish, content) values ($1, $2, $3, $4, $5, $6, $7, $8, $9)',
- [id, title, category, released_at, updated_at, tags, image, publish, content]
+ 'insert into article (id, released_at, updated_at, author, email, title, category, tags, image, publish, content) values ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11)',
+ [id, released_at, updated_at, author, email, title, category, tags, image, publish, content]
);
} else if (res.rows[0].updated_at < updated_at) {
// } else if (true) {
console.log(`Update article: ${id}`);
await db.query(
- 'update article set title = $2, category = $3, released_at = $4, updated_at = $5, tags = $6, image = $7, publish = $8, content = $9 where id = $1',
- [id, title, category, released_at, updated_at, tags, image, publish, content]
+ 'update article set released_at = $2, updated_at = $3, author = $4, email = $5, title = $6, category = $7, tags = $8, image = $9, publish = $10, content = $11 where id = $1',
+ [id, released_at, updated_at, author, email, title, category, tags, image, publish, content]
);
} else {
console.log(`Article ${id} is already up-to-date`);
@@ -162,6 +133,7 @@ export default async function init(db: Postgres) {
console.log(err);
await db.rollback();
} finally {
+ console.log('');
await db.commit();
}
}
@@ -180,4 +152,53 @@ export type TableSchema = {
type: string,
constraint: string,
}[],
+}
+
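+// Clone the articles repository on first run; afterwards just pull the latest changes.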
+const cloneRepo = async () => {
+ if (fs.existsSync('./articles/')) {
+		console.log('Pulling articles from git...');
+ await article_git.pull();
+ } else {
+		console.log('Cloning articles from git...');
+ await article_git.clone('git@gitea.hareworks.net:Hare/blog-articles.git', 'articles');
+ }
+}
+
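+// Create each table in the schema list unless it already exists.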
+const createTable = async (db: Postgres, schemas: TableSchema[]) => {
+ await db.begin();
+ try {
+ for (const schema of schemas) {
+ const res = await db.query(`select * from information_schema.tables where table_name = '${schema.name}'`)
+ if (res.rowCount == 0) {
+ console.log(`Creating table ${schema.name}`);
+ const columnStr = schema.columns.map(c => `${c.name} ${c.type} ${c.constraint}`).join(', ');
+ await db.query(`create table ${schema.name} (${columnStr})`);
+ } else {
+ console.log(`Table ${schema.name} already exists`);
+ }
+ }
+ } catch (err) {
+ console.error(err);
+ await db.rollback();
+ } finally {
+ await db.commit();
+ }
+}
+
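+// Recursively collect files under ./articles/article, using the file name (minus '.md') as the article id.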
+const crawlArticles = async (db: Postgres): Promise<ArticleFileItem[]> => {
+ const articleFiles: ArticleFileItem[] = [];
+ function scanDir(path: string) {
+ const files = fs.readdirSync(path);
+ for (const file of files) {
+ const dir = `${path}/${file}`;
+ const stat = fs.statSync(dir);
+ if (stat.isDirectory()) {
+ scanDir(dir);
+ } else {
+ articleFiles.push({ path: `${path}/${file}`, id: file.replace('.md', '') });
+ }
+ }
+ }
+ scanDir('./articles/article');
+ return articleFiles;
}
\ No newline at end of file