diff --git a/__tests__/unit-tests/extract-pagragraphs.test.ts b/__tests__/unit-tests/extract-pagragraphs.test.ts
index fa147b0..1f10888 100644
--- a/__tests__/unit-tests/extract-pagragraphs.test.ts
+++ b/__tests__/unit-tests/extract-pagragraphs.test.ts
@@ -1,4 +1,4 @@
-import { extractParagraphs } from '@/lib/utils';
+import { extractParagraphs } from '@/app/_lib/utils';
 
 describe('extract paragraphs only from markdown', () => {
   it('should return paragraph', () => {
diff --git a/__tests__/unit-tests/truncate.test.ts b/__tests__/unit-tests/truncate.test.ts
index de832d0..ebfcf7c 100644
--- a/__tests__/unit-tests/truncate.test.ts
+++ b/__tests__/unit-tests/truncate.test.ts
@@ -1,4 +1,4 @@
-import { truncate } from '@/lib/utils';
+import { truncate } from '@/app/_lib/utils';
 
 describe('truncate char', () => {
   const exampleStr = 'Step into Moonlitgrace';
diff --git a/app/(routes)/(main)/blog/[slug]/page.tsx b/app/(routes)/(main)/blog/[slug]/page.tsx
index 99da77a..de94bc7 100644
--- a/app/(routes)/(main)/blog/[slug]/page.tsx
+++ b/app/(routes)/(main)/blog/[slug]/page.tsx
@@ -1,5 +1,5 @@
 import { PostSelect } from '@/db/schema';
-import { extractParagraphs, formatDate, truncate } from '@/lib/utils';
+import { formatDate } from '@/lib/utils';
 import { marked, Tokens } from 'marked';
 import { Metadata } from 'next';
 import Image from 'next/image';
@@ -29,10 +29,10 @@ export async function generateMetadata({
     },
   };
 
-  const post = await res.json().then((res) => res.data);
-
-  const { title, content, cover, slug, tag, createdAt } = post;
-  const description = truncate(extractParagraphs(content), 160);
+  const post: Omit<PostSelect, 'content'> = await res
+    .json()
+    .then((res) => res.data);
+  const { title, description, cover, slug, tag, createdAt } = post;
 
   // og: dynamic image
   const ogImgUrl = new URL(process.env.NEXT_PUBLIC_APP_URL + '/api/og');
@@ -63,9 +63,7 @@ export async function generateMetadata({
 }
 
 export default async function Page({ params }: { params: { slug: string } }) {
-  const postData: PostSelect = await fetch(
-    `${process.env.NEXT_PUBLIC_APP_URL}/api/blog/${params.slug}`,
-  )
+  const post: PostSelect = await fetch(`${process.env.NEXT_PUBLIC_APP_URL}/api/blog/${params.slug}`)
     .then((res) => {
       if (res.status === 404) notFound();
       return res.json();
@@ -73,7 +71,7 @@ export default async function Page({ params }: { params: { slug: string } }) {
     .then((res) => res.data);
 
   const lexer = new marked.Lexer();
-  const tokens = lexer.lex(postData.content);
+  const tokens = lexer.lex(post.content);
   const headings = tokens
     .filter((token) => token.type === 'heading')
     .map((token) => (token as Tokens.Heading).text);
@@ -82,14 +80,14 @@ export default async function Page({ params }: { params: { slug: string } }) {
     <>
-          {formatDate(postData.createdAt)}
+          {formatDate(post.createdAt)}
-          {postData.title}
-          {postData.tag}
-        {postData.cover && (
+          {post.title}
+          {post.tag}
+        {post.cover && (
            {postData.title}
         )}
-
+
       {headings.length > 0 && }
     );
diff --git a/app/(routes)/(main)/blog/page.tsx b/app/(routes)/(main)/blog/page.tsx
index c6254bc..30358b9 100644
--- a/app/(routes)/(main)/blog/page.tsx
+++ b/app/(routes)/(main)/blog/page.tsx
@@ -1,6 +1,6 @@
 import { Badge } from '@/components/ui/badge';
 import { PostSelect } from '@/db/schema';
-import { cn, extractParagraphs, formatDate, truncate } from '@/lib/utils';
+import { cn, formatDate } from '@/lib/utils';
 import Link from 'next/link';
 import { Metadata } from 'next';
 import Image from 'next/image';
@@ -64,9 +64,7 @@ export default async function BlogPage() {
           {post.title}
-
-          {truncate(extractParagraphs(post.content), 100)}
-
+          {post.description}
         ))}
diff --git a/app/_lib/utils.ts b/app/_lib/utils.ts
new file mode 100644
index 0000000..fd5680c
--- /dev/null
+++ b/app/_lib/utils.ts
@@ -0,0 +1,17 @@
+import { stripHtmlTags } from '@/lib/utils';
+import { marked, Tokens } from 'marked';
+
+export function extractParagraphs(markdown: string) {
+  const tokens = marked.lexer(markdown);
+  const paragraphs = tokens
+    .filter((token) => token.type === 'paragraph')
+    .map((token) => {
+      const rawText = marked.parseInline((token as Tokens.Paragraph).text, { async: false });
+      return stripHtmlTags(rawText);
+    });
+  return paragraphs.join(' ');
+}
+
+export function truncate(str: string, n: number) {
+  return str.length > n ? str.slice(0, n - 3) + '...' : str;
+}
diff --git a/app/api/blog/route.ts b/app/api/blog/route.ts
index 0c6d60a..e60b3d8 100644
--- a/app/api/blog/route.ts
+++ b/app/api/blog/route.ts
@@ -1,14 +1,26 @@
 import { db } from '@/db';
 import { posts, PostSelect } from '@/db/schema';
+import { extractParagraphs, truncate } from '@/app/_lib/utils';
 import { AdminBlogData } from '@/zod_schemas/admin';
 import { desc, eq } from 'drizzle-orm';
 import { NextRequest, NextResponse } from 'next/server';
 import slugify from 'slugify';
 
-export async function GET(_req: NextRequest) {
-  console.log('API called');
+export async function GET(_request: NextRequest) {
   try {
-    const postsData: PostSelect[] = await db.select().from(posts).orderBy(desc(posts.createdAt));
+    const postsData: Omit<PostSelect, 'content'>[] = await db
+      .select({
+        id: posts.id,
+        title: posts.title,
+        slug: posts.slug,
+        tag: posts.tag,
+        cover: posts.cover,
+        description: posts.description,
+        createdAt: posts.createdAt,
+        draft: posts.draft,
+      })
+      .from(posts)
+      .orderBy(desc(posts.createdAt));
 
     return NextResponse.json({ data: postsData, message: 'success' });
   } catch (err) {
@@ -29,6 +41,7 @@ export async function POST(request: NextRequest) {
         title: data.title,
         tag: data.tag,
         content: data.content,
+        description: truncate(extractParagraphs(data.content), 160),
         slug: slugify(data.title.toLowerCase()),
         ...(data.cover && { cover: data.cover }),
         draft: data.draft,
@@ -41,6 +54,7 @@
         title: data.title,
         tag: data.tag,
         content: data.content,
+        description: truncate(extractParagraphs(data.content), 160),
         slug: slugify(data.title.toLowerCase()),
         ...(data.cover && { cover: data.cover }),
         draft: data.draft,
diff --git a/db/migrations/0000_shocking_absorbing_man.sql b/db/migrations/0000_silent_smasher.sql
similarity index 57%
rename from db/migrations/0000_shocking_absorbing_man.sql
rename to db/migrations/0000_silent_smasher.sql
index 98b9fb0..6c30d44 100644
--- a/db/migrations/0000_shocking_absorbing_man.sql
+++ b/db/migrations/0000_silent_smasher.sql
@@ -5,5 +5,7 @@ CREATE TABLE IF NOT EXISTS "posts" (
 	"tag" text NOT NULL,
 	"cover" text,
 	"content" text NOT NULL,
-	"created_at" timestamp DEFAULT now() NOT NULL
+	"description" text DEFAULT 'default:' NOT NULL,
+	"created_at" timestamp DEFAULT now() NOT NULL,
+	"draft" boolean DEFAULT false NOT NULL
 );
diff --git a/db/migrations/0001_slow_scarlet_spider.sql b/db/migrations/0001_slow_scarlet_spider.sql
deleted file mode 100644
index b3459e9..0000000
--- a/db/migrations/0001_slow_scarlet_spider.sql
+++ /dev/null
@@ -1 +0,0 @@
-ALTER TABLE "posts" ADD COLUMN "draft" boolean DEFAULT false NOT NULL;
\ No newline at end of file
diff --git a/db/migrations/0001_stormy_energizer.sql b/db/migrations/0001_stormy_energizer.sql
new file mode 100644
index 0000000..cfe0444
--- /dev/null
+++ b/db/migrations/0001_stormy_energizer.sql
@@ -0,0 +1 @@
+ALTER TABLE "posts" ALTER COLUMN "description" DROP DEFAULT;
\ No newline at end of file
diff --git a/db/migrations/meta/0000_snapshot.json b/db/migrations/meta/0000_snapshot.json
index f9b676f..0246c4d 100644
--- a/db/migrations/meta/0000_snapshot.json
+++ b/db/migrations/meta/0000_snapshot.json
@@ -1,5 +1,5 @@
 {
-  "id": "a0e38a8e-5c92-4669-9f13-789788513011",
+  "id": "04cf5614-0ac7-4007-8d19-a50f67424b29",
   "prevId": "00000000-0000-0000-0000-000000000000",
   "version": "7",
   "dialect": "postgresql",
@@ -44,12 +44,26 @@
         "primaryKey": false,
         "notNull": true
       },
+      "description": {
+        "name": "description",
+        "type": "text",
+        "primaryKey": false,
+        "notNull": true,
+        "default": "'default:'"
+      },
       "created_at": {
         "name": "created_at",
         "type": "timestamp",
         "primaryKey": false,
         "notNull": true,
         "default": "now()"
+      },
+      "draft": {
+        "name": "draft",
+        "type": "boolean",
+        "primaryKey": false,
+        "notNull": true,
+        "default": false
       }
     },
     "indexes": {},
diff --git a/db/migrations/meta/0001_snapshot.json b/db/migrations/meta/0001_snapshot.json
index 3f3cf2d..212eb9d 100644
--- a/db/migrations/meta/0001_snapshot.json
+++ b/db/migrations/meta/0001_snapshot.json
@@ -1,6 +1,6 @@
 {
-  "id": "bfb47a84-8aa3-4498-a3c3-d57b380d926b",
-  "prevId": "a0e38a8e-5c92-4669-9f13-789788513011",
+  "id": "013ff335-d84d-4aa4-bcd6-5da5816bc397",
+  "prevId": "04cf5614-0ac7-4007-8d19-a50f67424b29",
   "version": "7",
   "dialect": "postgresql",
   "tables": {
@@ -44,6 +44,12 @@
         "primaryKey": false,
         "notNull": true
       },
+      "description": {
+        "name": "description",
+        "type": "text",
+        "primaryKey": false,
+        "notNull": true
+      },
       "created_at": {
         "name": "created_at",
         "type": "timestamp",
diff --git a/db/migrations/meta/_journal.json b/db/migrations/meta/_journal.json
index eb1993c..08feaa5 100644
--- a/db/migrations/meta/_journal.json
+++ b/db/migrations/meta/_journal.json
@@ -5,15 +5,15 @@
     {
       "idx": 0,
       "version": "7",
-      "when": 1725340620008,
-      "tag": "0000_shocking_absorbing_man",
+      "when": 1728442840815,
+      "tag": "0000_silent_smasher",
       "breakpoints": true
     },
     {
       "idx": 1,
       "version": "7",
-      "when": 1726658878773,
-      "tag": "0001_slow_scarlet_spider",
+      "when": 1728443335073,
+      "tag": "0001_stormy_energizer",
       "breakpoints": true
     }
   ]
diff --git a/db/schema.ts b/db/schema.ts
index df3668d..8189ed0 100644
--- a/db/schema.ts
+++ b/db/schema.ts
@@ -7,6 +7,7 @@ export const posts = pgTable('posts', {
   tag: text('tag').notNull(),
   cover: text('cover'),
   content: text('content').notNull(),
+  description: text('description').notNull(),
   createdAt: timestamp('created_at').notNull().defaultNow(),
   draft: boolean('draft').notNull().default(false),
 });
diff --git a/lib/utils.ts b/lib/utils.ts
index e75296c..0038201 100644
--- a/lib/utils.ts
+++ b/lib/utils.ts
@@ -1,6 +1,5 @@
 import { type ClassValue, clsx } from 'clsx';
 import { twMerge } from 'tailwind-merge';
-import { marked, Tokens } from 'marked';
 export function cn(...inputs: ClassValue[]) {
   return twMerge(clsx(inputs));
 }
@@ -27,22 +26,6 @@ export function stripHtmlTags(html: string) {
   return html.replace(/<[^>]*>/g, '');
 }
 
-export function extractParagraphs(markdown: string) {
-  const tokens = marked.lexer(markdown);
-  const paragraphs = tokens
-    .filter((token) => token.type === 'paragraph')
-    .map((token) => {
-      const rawText = marked.parseInline((token as Tokens.Paragraph).text, { async: false });
-      return stripHtmlTags(rawText);
-    });
-  return paragraphs.join(' ');
-}
-
-export function truncate(str: string, n: number) {
-  return str.length > n ? str.slice(0, n - 3) + '...' : str;
-}
-
-// TODO: add test for this function
 export function arrayBufferToBase64(buffer: ArrayBuffer): string {
   let binary = '';
   const bytes = new Uint8Array(buffer);
diff --git a/package.json b/package.json
index 826436f..d71bfd1 100644
--- a/package.json
+++ b/package.json
@@ -15,6 +15,7 @@
     "db:generate": "drizzle-kit generate --config=./drizzle.config.ts",
     "db:migrate": "drizzle-kit migrate --config=./drizzle.config.ts",
     "db:migrate:prod": "cross-env NODE_ENV=production dotenvx run -f .env.production.local -- drizzle-kit migrate --config=./drizzle.config.ts",
+    "db:push": "drizzle-kit push --config=./drizzle.config.ts",
     "db:studio": "drizzle-kit studio --config=./drizzle.config.ts",
     "prepare": "husky"
   },
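
For reference, a minimal usage sketch (illustrative, not part of the patch) showing how the relocated helpers compose into the stored description, assuming the '@/app/_lib/utils' path introduced above:

// Mirrors what POST /api/blog now does with data.content.
import { extractParagraphs, truncate } from '@/app/_lib/utils';

const markdown = '# Title\n\nFirst paragraph with **bold** text.\n\nSecond paragraph.';

// extractParagraphs keeps only paragraph tokens and strips inline HTML,
// so headings, lists, and code blocks never leak into the summary.
const plain = extractParagraphs(markdown); // 'First paragraph with bold text. Second paragraph.'

// truncate caps the summary at 160 chars, reserving three for the trailing '...'.
const description = truncate(plain, 160);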