diff --git a/README.md b/README.md index e852ac167..9884fa369 100644 --- a/README.md +++ b/README.md @@ -21,7 +21,7 @@ - [Next.js](https://nextjs.org) App Router - React Server Components (RSCs), Suspense, and Server Actions - [Vercel AI SDK](https://sdk.vercel.ai/docs) for streaming chat UI -- Support for OpenAI (default), Anthropic, Hugging Face, or custom AI chat models and/or LangChain +- Support for OpenAI (default), Anthropic, Cohere, Hugging Face, or custom AI chat models and/or LangChain - [shadcn/ui](https://ui.shadcn.com) - Styling with [Tailwind CSS](https://tailwindcss.com) - [Radix UI](https://radix-ui.com) for headless component primitives @@ -31,7 +31,7 @@ ## Model Providers -This template ships with OpenAI `gpt-3.5-turbo` as the default. However, thanks to the [Vercel AI SDK](https://sdk.vercel.ai/docs), you can switch LLM providers to [Anthropic](https://anthropic.com), [Hugging Face](https://huggingface.co), or using [LangChain](https://js.langchain.com) with just a few lines of code. +This template ships with OpenAI `gpt-3.5-turbo` as the default. However, thanks to the [Vercel AI SDK](https://sdk.vercel.ai/docs), you can switch LLM providers to [Anthropic](https://anthropic.com), [Cohere](https://cohere.com/), [Hugging Face](https://huggingface.co), or use [LangChain](https://js.langchain.com) with just a few lines of code. 
## Deploy Your Own diff --git a/components/chat.tsx b/components/chat.tsx index a9ecafe05..a4c60ccbd 100644 --- a/components/chat.tsx +++ b/components/chat.tsx @@ -20,6 +20,7 @@ import { useState } from 'react' import { Button } from './ui/button' import { Input } from './ui/input' import { toast } from 'react-hot-toast' +import { usePathname, useRouter } from 'next/navigation' const IS_PREVIEW = process.env.VERCEL_ENV === 'preview' export interface ChatProps extends React.ComponentProps<'div'> { @@ -28,6 +29,8 @@ export interface ChatProps extends React.ComponentProps<'div'> { } export function Chat({ id, initialMessages, className }: ChatProps) { + const router = useRouter() + const path = usePathname() const [previewToken, setPreviewToken] = useLocalStorage( 'ai-token', null @@ -46,6 +49,12 @@ export function Chat({ id, initialMessages, className }: ChatProps) { if (response.status === 401) { toast.error(response.statusText) } + }, + onFinish() { + if (!path.includes('chat')) { + router.push(`/chat/${id}`, { shallow: true }) + router.refresh() + } } }) return ( diff --git a/components/ui/codeblock.tsx b/components/ui/codeblock.tsx index ccbf82d80..3a61fd464 100644 --- a/components/ui/codeblock.tsx +++ b/components/ui/codeblock.tsx @@ -128,6 +128,9 @@ const CodeBlock: FC = memo(({ language, value }) => { background: 'transparent', padding: '1.5rem 1rem' }} + lineNumberStyle={{ + userSelect: "none", + }} codeTagProps={{ style: { fontSize: '0.9rem',