diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 0000000000000000000000000000000000000000..c7d9f3332a950355d5a77d85000f05e6f45435ea --- /dev/null +++ b/.gitattributes @@ -0,0 +1,34 @@ +*.7z filter=lfs diff=lfs merge=lfs -text +*.arrow filter=lfs diff=lfs merge=lfs -text +*.bin filter=lfs diff=lfs merge=lfs -text +*.bz2 filter=lfs diff=lfs merge=lfs -text +*.ckpt filter=lfs diff=lfs merge=lfs -text +*.ftz filter=lfs diff=lfs merge=lfs -text +*.gz filter=lfs diff=lfs merge=lfs -text +*.h5 filter=lfs diff=lfs merge=lfs -text +*.joblib filter=lfs diff=lfs merge=lfs -text +*.lfs.* filter=lfs diff=lfs merge=lfs -text +*.mlmodel filter=lfs diff=lfs merge=lfs -text +*.model filter=lfs diff=lfs merge=lfs -text +*.msgpack filter=lfs diff=lfs merge=lfs -text +*.npy filter=lfs diff=lfs merge=lfs -text +*.npz filter=lfs diff=lfs merge=lfs -text +*.onnx filter=lfs diff=lfs merge=lfs -text +*.ot filter=lfs diff=lfs merge=lfs -text +*.parquet filter=lfs diff=lfs merge=lfs -text +*.pb filter=lfs diff=lfs merge=lfs -text +*.pickle filter=lfs diff=lfs merge=lfs -text +*.pkl filter=lfs diff=lfs merge=lfs -text +*.pt filter=lfs diff=lfs merge=lfs -text +*.pth filter=lfs diff=lfs merge=lfs -text +*.rar filter=lfs diff=lfs merge=lfs -text +*.safetensors filter=lfs diff=lfs merge=lfs -text +saved_model/**/* filter=lfs diff=lfs merge=lfs -text +*.tar.* filter=lfs diff=lfs merge=lfs -text +*.tflite filter=lfs diff=lfs merge=lfs -text +*.tgz filter=lfs diff=lfs merge=lfs -text +*.wasm filter=lfs diff=lfs merge=lfs -text +*.xz filter=lfs diff=lfs merge=lfs -text +*.zip filter=lfs diff=lfs merge=lfs -text +*.zst filter=lfs diff=lfs merge=lfs -text +*tfevents* filter=lfs diff=lfs merge=lfs -text diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 0000000000000000000000000000000000000000..2fc863718e9eaa6d9d1a2f4f35c1319bd57366f9 --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,45 @@ +# Contributing Guidelines + +**Welcome to Chatbot UI!** + +We 
appreciate your interest in contributing to our project. + +Before you get started, please read our guidelines for contributing. + +## Types of Contributions + +We welcome the following types of contributions: + +- Bug fixes +- New features +- Documentation improvements +- Code optimizations +- Translations +- Tests + +## Getting Started + +To get started, fork the project on GitHub and clone it locally on your machine. Then, create a new branch to work on your changes. + +``` +git clone https://github.com/mckaywrigley/chatbot-ui.git +cd chatbot-ui +git checkout -b my-branch-name + +``` + +Before submitting your pull request, please make sure your changes pass our automated tests and adhere to our code style guidelines. + +## Pull Request Process + +1. Fork the project on GitHub. +2. Clone your forked repository locally on your machine. +3. Create a new branch from the main branch. +4. Make your changes on the new branch. +5. Ensure that your changes adhere to our code style guidelines and pass our automated tests. +6. Commit your changes and push them to your forked repository. +7. Submit a pull request to the main branch of the main repository. + +## Contact + +If you have any questions or need help getting started, feel free to reach out to me on [Twitter](https://twitter.com/mckaywrigley). diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000000000000000000000000000000000000..093f5352f7e811d0515119a0272de7631bade967 --- /dev/null +++ b/Dockerfile @@ -0,0 +1,63 @@ +FROM node:18-alpine AS base + +# Install dependencies only when needed +FROM base AS deps +# Check https://github.com/nodejs/docker-node/tree/b4117f9333da4138b03a546ec926ef50a31506c3#nodealpine to understand why libc6-compat might be needed. 
+RUN apk add --no-cache libc6-compat +WORKDIR /app + +# Install dependencies based on the preferred package manager +COPY package.json yarn.lock* package-lock.json* pnpm-lock.yaml* ./ +RUN \ + if [ -f yarn.lock ]; then yarn --frozen-lockfile; \ + elif [ -f package-lock.json ]; then npm ci; \ + elif [ -f pnpm-lock.yaml ]; then yarn global add pnpm && pnpm i --frozen-lockfile; \ + else echo "Lockfile not found." && exit 1; \ + fi + +# Uncomment the following lines if you want to use a secret at buildtime, +# for example to access your private npm packages +# RUN --mount=type=secret,id=HF_EXAMPLE_SECRET,mode=0444,required=true \ +# $(cat /run/secrets/HF_EXAMPLE_SECRET) + +# Rebuild the source code only when needed +FROM base AS builder +WORKDIR /app +COPY --from=deps /app/node_modules ./node_modules +COPY . . + +# Next.js collects completely anonymous telemetry data about general usage. +# Learn more here: https://nextjs.org/telemetry +# Uncomment the following line in case you want to disable telemetry during the build. +# ENV NEXT_TELEMETRY_DISABLED 1 + +# RUN yarn build + +# If you use yarn, comment out this line and use the line above +RUN npm run build + +# Production image, copy all the files and run next +FROM base AS runner +WORKDIR /app + +ENV NODE_ENV production +# Uncomment the following line in case you want to disable telemetry during runtime. 
+# ENV NEXT_TELEMETRY_DISABLED 1 + +RUN addgroup --system --gid 1001 nodejs +RUN adduser --system --uid 1001 nextjs + +COPY --from=builder /app/public ./public + +# Automatically leverage output traces to reduce image size +# https://nextjs.org/docs/advanced-features/output-file-tracing +COPY --from=builder --chown=nextjs:nodejs /app/.next/standalone ./ +COPY --from=builder --chown=nextjs:nodejs /app/.next/static ./.next/static + +USER nextjs + +EXPOSE 3000 + +ENV PORT 3000 + +CMD ["node", "server.js"] \ No newline at end of file diff --git a/Makefile b/Makefile new file mode 100644 index 0000000000000000000000000000000000000000..8dc4e12dc227a0ffe26ac1769fd9da539e5b438c --- /dev/null +++ b/Makefile @@ -0,0 +1,18 @@ +include .env + +.PHONY: all + +build: + docker build -t chatbot-ui . + +run: + export $(cat .env | xargs) + docker stop chatbot-ui || true && docker rm chatbot-ui || true + docker run --name chatbot-ui --rm -e OPENAI_API_KEY=${OPENAI_API_KEY} -p 3000:3000 chatbot-ui + +logs: + docker logs -f chatbot-ui + +push: + docker tag chatbot-ui:latest ${DOCKER_USER}/chatbot-ui:${DOCKER_TAG} + docker push ${DOCKER_USER}/chatbot-ui:${DOCKER_TAG} \ No newline at end of file diff --git a/README.md b/README.md new file mode 100644 index 0000000000000000000000000000000000000000..5f79118b343c77c90de120ce67dc986fdc9ff4e7 --- /dev/null +++ b/README.md @@ -0,0 +1,117 @@ +--- +sdk: docker +app_port: 3000 +title: Chatbot UI +emoji: 💭🤷‍♀️ +colorFrom: pink +colorTo: blue +duplicated_from: matthoffner/chatbot +--- + +Forked chat-bot + +# Chatbot UI + +Chatbot UI is an open source chat UI for AI models. + +See a [demo](https://twitter.com/mckaywrigley/status/1640380021423603713?s=46&t=AowqkodyK6B4JccSOxSPew). + +![Chatbot UI](./public/screenshots/screenshot-0402023.jpg) + +## Updates + +Chatbot UI will be updated over time. + +Expect frequent improvements. 
+ +**Next up:** + +- [ ] Sharing +- [ ] "Bots" + +## Deploy + +**Vercel** + +Host your own live version of Chatbot UI with Vercel. + +[![Deploy with Vercel](https://vercel.com/button)](https://vercel.com/new/clone?repository-url=https%3A%2F%2Fgithub.com%2Fmckaywrigley%2Fchatbot-ui) + +**Docker** + +Build locally: + +```shell +docker build -t chatgpt-ui . +docker run -e OPENAI_API_KEY=xxxxxxxx -p 3000:3000 chatgpt-ui +``` + +Pull from ghcr: + +``` +docker run -e OPENAI_API_KEY=xxxxxxxx -p 3000:3000 ghcr.io/mckaywrigley/chatbot-ui:main +``` + +## Running Locally + +**1. Clone Repo** + +```bash +git clone https://github.com/mckaywrigley/chatbot-ui.git +``` + +**2. Install Dependencies** + +```bash +npm i +``` + +**3. Provide OpenAI API Key** + +Create a .env.local file in the root of the repo with your OpenAI API Key: + +```bash +OPENAI_API_KEY=YOUR_KEY +``` + +> You can set `OPENAI_API_HOST` where access to the official OpenAI host is restricted or unavailable, allowing users to configure an alternative host for their specific needs. + +> Additionally, if you have multiple OpenAI Organizations, you can set `OPENAI_ORGANIZATION` to specify one. + +**4. Run App** + +```bash +npm run dev +``` + +**5. Use It** + +You should be able to start chatting. 
+ +## Configuration + +When deploying the application, the following environment variables can be set: + +| Environment Variable | Default value | Description | +| --------------------------------- | ------------------------------ | ----------------------------------------------------------------------------------------------------------------------------------------- | +| OPENAI_API_KEY | | The default API key used for authentication with OpenAI | +| OPENAI_API_HOST | `https://api.openai.com` | The base url, for Azure use `https://.openai.azure.com` | +| OPENAI_API_TYPE | `openai` | The API type, options are `openai` or `azure` | +| OPENAI_API_VERSION | `2023-03-15-preview` | Only applicable for Azure OpenAI | +| AZURE_DEPLOYMENT_ID | | Needed when Azure OpenAI, Ref [Azure OpenAI API](https://learn.microsoft.com/zh-cn/azure/cognitive-services/openai/reference#completions) | +| OPENAI_ORGANIZATION | | Your OpenAI organization ID | +| DEFAULT_MODEL | `gpt-3.5-turbo` | The default model to use on new conversations, for Azure use `gpt-35-turbo` | +| NEXT_PUBLIC_DEFAULT_SYSTEM_PROMPT | [see here](utils/app/const.ts) | The default system prompt to use on new conversations | +| NEXT_PUBLIC_DEFAULT_TEMPERATURE | 1 | The default temperature to use on new conversations | +| GOOGLE_API_KEY | | See [Custom Search JSON API documentation][GCSE] | +| GOOGLE_CSE_ID | | See [Custom Search JSON API documentation][GCSE] | + +If you do not provide an OpenAI API key with `OPENAI_API_KEY`, users will have to provide their own key. + +If you don't have an OpenAI API key, you can get one [here](https://platform.openai.com/account/api-keys). + +## Contact + +If you have any questions, feel free to reach out to Mckay on [Twitter](https://twitter.com/mckaywrigley). 
+ +[GCSE]: https://developers.google.com/custom-search/v1/overview \ No newline at end of file diff --git a/SECURITY.md b/SECURITY.md new file mode 100644 index 0000000000000000000000000000000000000000..42f79949474efbc61815647263aa005708780d22 --- /dev/null +++ b/SECURITY.md @@ -0,0 +1,53 @@ +# Security Policy + + +This security policy outlines the process for reporting vulnerabilities and secrets found within this GitHub repository. It is essential that all contributors and users adhere to this policy in order to maintain a secure and stable environment. + +## Reporting a Vulnerability + +If you discover a vulnerability within the code, dependencies, or any other component of this repository, please follow these steps: + +1. **Do not disclose the vulnerability publicly.** Publicly disclosing a vulnerability may put the project at risk and could potentially harm other users. + +2. **Contact the repository maintainer(s) privately.** Send a private message or email to the maintainer(s) with a detailed description of the vulnerability. Include the following information: + + - The affected component(s) + - Steps to reproduce the issue + - Potential impact of the vulnerability + - Any possible mitigations or workarounds + +3. **Wait for a response from the maintainer(s).** Please be patient, as they may need time to investigate and verify the issue. The maintainer(s) should acknowledge receipt of your report and provide an estimated time frame for addressing the vulnerability. + +4. **Cooperate with the maintainer(s).** If requested, provide additional information or assistance to help resolve the issue. + +5. **Do not disclose the vulnerability until the maintainer(s) have addressed it.** Once the issue has been resolved, the maintainer(s) may choose to publicly disclose the vulnerability and credit you for the discovery. + +## Reporting Secrets + +If you discover any secrets, such as API keys or passwords, within the repository, follow these steps: + +1. 
**Do not share the secret or use it for unauthorized purposes.** Misusing a secret could have severe consequences for the project and its users. + +2. **Contact the repository maintainer(s) privately.** Notify them of the discovered secret, its location, and any potential risks associated with it. + +3. **Wait for a response and further instructions.** + +## Responsible Disclosure + +We encourage responsible disclosure of vulnerabilities and secrets. If you follow the steps outlined in this policy, we will work with you to understand and address the issue. We will not take legal action against individuals who discover and report vulnerabilities or secrets in accordance with this policy. + +## Patching and Updates + +We are committed to maintaining the security of our project. When vulnerabilities are reported and confirmed, we will: + +1. Work diligently to develop and apply a patch or implement a mitigation strategy. +2. Keep the reporter informed about the progress of the fix. +3. Update the repository with the necessary patches and document the changes in the release notes or changelog. +4. Credit the reporter for the discovery, if they wish to be acknowledged. + +## Contributing to Security + +We welcome contributions that help improve the security of our project. If you have suggestions or want to contribute code to address security issues, please follow the standard contribution guidelines for this repository. When submitting a pull request related to security, please mention that it addresses a security issue and provide any necessary context. + +By adhering to this security policy, you contribute to the overall security and stability of the project. Thank you for your cooperation and responsible handling of vulnerabilities and secrets. 
+ diff --git a/__tests__/utils/app/importExports.test.ts b/__tests__/utils/app/importExports.test.ts new file mode 100644 index 0000000000000000000000000000000000000000..aa51cbc054eae6a7921d88f2e894186e82a87739 --- /dev/null +++ b/__tests__/utils/app/importExports.test.ts @@ -0,0 +1,264 @@ +import { DEFAULT_SYSTEM_PROMPT, DEFAULT_TEMPERATURE } from '@/utils/app/const'; +import { + cleanData, + isExportFormatV1, + isExportFormatV2, + isExportFormatV3, + isExportFormatV4, + isLatestExportFormat, +} from '@/utils/app/importExport'; + +import { ExportFormatV1, ExportFormatV2, ExportFormatV4 } from '@/types/export'; +import { OpenAIModelID, OpenAIModels } from '@/types/openai'; + +import { describe, expect, it } from 'vitest'; + +describe('Export Format Functions', () => { + describe('isExportFormatV1', () => { + it('should return true for v1 format', () => { + const obj = [{ id: 1 }]; + expect(isExportFormatV1(obj)).toBe(true); + }); + + it('should return false for non-v1 formats', () => { + const obj = { version: 3, history: [], folders: [] }; + expect(isExportFormatV1(obj)).toBe(false); + }); + }); + + describe('isExportFormatV2', () => { + it('should return true for v2 format', () => { + const obj = { history: [], folders: [] }; + expect(isExportFormatV2(obj)).toBe(true); + }); + + it('should return false for non-v2 formats', () => { + const obj = { version: 3, history: [], folders: [] }; + expect(isExportFormatV2(obj)).toBe(false); + }); + }); + + describe('isExportFormatV3', () => { + it('should return true for v3 format', () => { + const obj = { version: 3, history: [], folders: [] }; + expect(isExportFormatV3(obj)).toBe(true); + }); + + it('should return false for non-v3 formats', () => { + const obj = { version: 4, history: [], folders: [] }; + expect(isExportFormatV3(obj)).toBe(false); + }); + }); + + describe('isExportFormatV4', () => { + it('should return true for v4 format', () => { + const obj = { version: 4, history: [], folders: [], prompts: [] }; + 
expect(isExportFormatV4(obj)).toBe(true); + }); + + it('should return false for non-v4 formats', () => { + const obj = { version: 5, history: [], folders: [], prompts: [] }; + expect(isExportFormatV4(obj)).toBe(false); + }); + }); +}); + +describe('cleanData Functions', () => { + describe('cleaning v1 data', () => { + it('should return the latest format', () => { + const data = [ + { + id: 1, + name: 'conversation 1', + messages: [ + { + role: 'user', + content: "what's up ?", + }, + { + role: 'assistant', + content: 'Hi', + }, + ], + }, + ] as ExportFormatV1; + const obj = cleanData(data); + expect(isLatestExportFormat(obj)).toBe(true); + expect(obj).toEqual({ + version: 4, + history: [ + { + id: 1, + name: 'conversation 1', + messages: [ + { + role: 'user', + content: "what's up ?", + }, + { + role: 'assistant', + content: 'Hi', + }, + ], + model: OpenAIModels[OpenAIModelID.GPT_3_5], + prompt: DEFAULT_SYSTEM_PROMPT, + temperature: DEFAULT_TEMPERATURE, + folderId: null, + }, + ], + folders: [], + prompts: [], + }); + }); + }); + + describe('cleaning v2 data', () => { + it('should return the latest format', () => { + const data = { + history: [ + { + id: '1', + name: 'conversation 1', + messages: [ + { + role: 'user', + content: "what's up ?", + }, + { + role: 'assistant', + content: 'Hi', + }, + ], + }, + ], + folders: [ + { + id: 1, + name: 'folder 1', + }, + ], + } as ExportFormatV2; + const obj = cleanData(data); + expect(isLatestExportFormat(obj)).toBe(true); + expect(obj).toEqual({ + version: 4, + history: [ + { + id: '1', + name: 'conversation 1', + messages: [ + { + role: 'user', + content: "what's up ?", + }, + { + role: 'assistant', + content: 'Hi', + }, + ], + model: OpenAIModels[OpenAIModelID.GPT_3_5], + prompt: DEFAULT_SYSTEM_PROMPT, + temperature: DEFAULT_TEMPERATURE, + folderId: null, + }, + ], + folders: [ + { + id: '1', + name: 'folder 1', + type: 'chat', + }, + ], + prompts: [], + }); + }); + }); + + describe('cleaning v4 data', () => { + 
it('should return the latest format', () => { + const data = { + version: 4, + history: [ + { + id: '1', + name: 'conversation 1', + messages: [ + { + role: 'user', + content: "what's up ?", + }, + { + role: 'assistant', + content: 'Hi', + }, + ], + model: OpenAIModels[OpenAIModelID.GPT_3_5], + prompt: DEFAULT_SYSTEM_PROMPT, + temperature: DEFAULT_TEMPERATURE, + folderId: null, + }, + ], + folders: [ + { + id: '1', + name: 'folder 1', + type: 'chat', + }, + ], + prompts: [ + { + id: '1', + name: 'prompt 1', + description: '', + content: '', + model: OpenAIModels[OpenAIModelID.GPT_3_5], + folderId: null, + }, + ], + } as ExportFormatV4; + + const obj = cleanData(data); + expect(isLatestExportFormat(obj)).toBe(true); + expect(obj).toEqual({ + version: 4, + history: [ + { + id: '1', + name: 'conversation 1', + messages: [ + { + role: 'user', + content: "what's up ?", + }, + { + role: 'assistant', + content: 'Hi', + }, + ], + model: OpenAIModels[OpenAIModelID.GPT_3_5], + prompt: DEFAULT_SYSTEM_PROMPT, + temperature: DEFAULT_TEMPERATURE, + folderId: null, + }, + ], + folders: [ + { + id: '1', + name: 'folder 1', + type: 'chat', + }, + ], + prompts: [ + { + id: '1', + name: 'prompt 1', + description: '', + content: '', + model: OpenAIModels[OpenAIModelID.GPT_3_5], + folderId: null, + }, + ], + }); + }); + }); +}); diff --git a/components/Buttons/SidebarActionButton/SidebarActionButton.tsx b/components/Buttons/SidebarActionButton/SidebarActionButton.tsx new file mode 100644 index 0000000000000000000000000000000000000000..2fdc79daa52e183136cd1982f5bc1642b2867714 --- /dev/null +++ b/components/Buttons/SidebarActionButton/SidebarActionButton.tsx @@ -0,0 +1,17 @@ +import { MouseEventHandler, ReactElement } from 'react'; + +interface Props { + handleClick: MouseEventHandler; + children: ReactElement; +} + +const SidebarActionButton = ({ handleClick, children }: Props) => ( + +); + +export default SidebarActionButton; diff --git a/components/Buttons/SidebarActionButton/index.ts 
b/components/Buttons/SidebarActionButton/index.ts new file mode 100644 index 0000000000000000000000000000000000000000..1fce00e46cd649fea08ed9e7c6c136ac86fe1528 --- /dev/null +++ b/components/Buttons/SidebarActionButton/index.ts @@ -0,0 +1 @@ +export { default } from './SidebarActionButton'; diff --git a/components/Chat/Chat.tsx b/components/Chat/Chat.tsx new file mode 100644 index 0000000000000000000000000000000000000000..a9ac1f28cb10b204f88fcde4332513d5d1a8ec5d --- /dev/null +++ b/components/Chat/Chat.tsx @@ -0,0 +1,487 @@ +import { IconClearAll, IconSettings } from '@tabler/icons-react'; +import { + MutableRefObject, + memo, + useCallback, + useContext, + useEffect, + useRef, + useState, +} from 'react'; +import toast from 'react-hot-toast'; + +import { useTranslation } from 'next-i18next'; + +import { getEndpoint } from '@/utils/app/api'; +import { + saveConversation, + saveConversations, + updateConversation, +} from '@/utils/app/conversation'; +import { throttle } from '@/utils/data/throttle'; + +import { ChatBody, Conversation, Message } from '@/types/chat'; +import { Plugin } from '@/types/plugin'; + +import HomeContext from '@/pages/api/home/home.context'; + +import { ChatInput } from './ChatInput'; +import { ChatLoader } from './ChatLoader'; +import { ErrorMessageDiv } from './ErrorMessageDiv'; +import { ModelSelect } from './ModelSelect'; +import { SystemPrompt } from './SystemPrompt'; +import { TemperatureSlider } from './Temperature'; +import { MemoizedChatMessage } from './MemoizedChatMessage'; + +interface Props { + stopConversationRef: MutableRefObject; +} + +export const Chat = memo(({ stopConversationRef }: Props) => { + const { t } = useTranslation('chat'); + + const { + state: { + selectedConversation, + conversations, + models, + apiKey, + pluginKeys, + serverSideApiKeyIsSet, + messageIsStreaming, + modelError, + loading, + prompts, + }, + handleUpdateConversation, + dispatch: homeDispatch, + } = useContext(HomeContext); + + const 
[currentMessage, setCurrentMessage] = useState(); + const [autoScrollEnabled, setAutoScrollEnabled] = useState(true); + const [showSettings, setShowSettings] = useState(false); + const [showScrollDownButton, setShowScrollDownButton] = + useState(false); + + const messagesEndRef = useRef(null); + const chatContainerRef = useRef(null); + const textareaRef = useRef(null); + + const handleSend = useCallback( + async (message: Message, deleteCount = 0, plugin: Plugin | null = null) => { + if (selectedConversation) { + let updatedConversation: Conversation; + if (deleteCount) { + const updatedMessages = [...selectedConversation.messages]; + for (let i = 0; i < deleteCount; i++) { + updatedMessages.pop(); + } + updatedConversation = { + ...selectedConversation, + messages: [...updatedMessages, message], + }; + } else { + updatedConversation = { + ...selectedConversation, + messages: [...selectedConversation.messages, message], + }; + } + homeDispatch({ + field: 'selectedConversation', + value: updatedConversation, + }); + homeDispatch({ field: 'loading', value: true }); + homeDispatch({ field: 'messageIsStreaming', value: true }); + const chatBody: ChatBody = { + model: updatedConversation.model, + messages: updatedConversation.messages, + key: apiKey, + prompt: updatedConversation.prompt, + temperature: updatedConversation.temperature, + }; + const endpoint = getEndpoint(plugin); + let body; + if (!plugin) { + body = JSON.stringify(chatBody); + } else { + body = JSON.stringify({ + ...chatBody, + googleAPIKey: pluginKeys + .find((key) => key.pluginId === 'google-search') + ?.requiredKeys.find((key) => key.key === 'GOOGLE_API_KEY')?.value, + googleCSEId: pluginKeys + .find((key) => key.pluginId === 'google-search') + ?.requiredKeys.find((key) => key.key === 'GOOGLE_CSE_ID')?.value, + }); + } + const controller = new AbortController(); + const response = await fetch(endpoint, { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + }, + signal: 
controller.signal, + body, + }); + if (!response.ok) { + homeDispatch({ field: 'loading', value: false }); + homeDispatch({ field: 'messageIsStreaming', value: false }); + toast.error(response.statusText); + return; + } + const data = response.body; + if (!data) { + homeDispatch({ field: 'loading', value: false }); + homeDispatch({ field: 'messageIsStreaming', value: false }); + return; + } + if (!plugin) { + if (updatedConversation.messages.length === 1) { + const { content } = message; + const customName = + content.length > 30 ? content.substring(0, 30) + '...' : content; + updatedConversation = { + ...updatedConversation, + name: customName, + }; + } + homeDispatch({ field: 'loading', value: false }); + const reader = data.getReader(); + const decoder = new TextDecoder(); + let done = false; + let isFirst = true; + let text = ''; + while (!done) { + if (stopConversationRef.current === true) { + controller.abort(); + done = true; + break; + } + const { value, done: doneReading } = await reader.read(); + done = doneReading; + const chunkValue = decoder.decode(value); + text += chunkValue; + if (isFirst) { + isFirst = false; + const updatedMessages: Message[] = [ + ...updatedConversation.messages, + { role: 'assistant', content: chunkValue }, + ]; + updatedConversation = { + ...updatedConversation, + messages: updatedMessages, + }; + homeDispatch({ + field: 'selectedConversation', + value: updatedConversation, + }); + } else { + const updatedMessages: Message[] = + updatedConversation.messages.map((message, index) => { + if (index === updatedConversation.messages.length - 1) { + return { + ...message, + content: text, + }; + } + return message; + }); + updatedConversation = { + ...updatedConversation, + messages: updatedMessages, + }; + homeDispatch({ + field: 'selectedConversation', + value: updatedConversation, + }); + } + } + saveConversation(updatedConversation); + const updatedConversations: Conversation[] = conversations.map( + (conversation) => { + if 
(conversation.id === selectedConversation.id) { + return updatedConversation; + } + return conversation; + }, + ); + if (updatedConversations.length === 0) { + updatedConversations.push(updatedConversation); + } + homeDispatch({ field: 'conversations', value: updatedConversations }); + saveConversations(updatedConversations); + homeDispatch({ field: 'messageIsStreaming', value: false }); + } else { + const { answer } = await response.json(); + const updatedMessages: Message[] = [ + ...updatedConversation.messages, + { role: 'assistant', content: answer }, + ]; + updatedConversation = { + ...updatedConversation, + messages: updatedMessages, + }; + homeDispatch({ + field: 'selectedConversation', + value: updateConversation, + }); + saveConversation(updatedConversation); + const updatedConversations: Conversation[] = conversations.map( + (conversation) => { + if (conversation.id === selectedConversation.id) { + return updatedConversation; + } + return conversation; + }, + ); + if (updatedConversations.length === 0) { + updatedConversations.push(updatedConversation); + } + homeDispatch({ field: 'conversations', value: updatedConversations }); + saveConversations(updatedConversations); + homeDispatch({ field: 'loading', value: false }); + homeDispatch({ field: 'messageIsStreaming', value: false }); + } + } + }, + [ + apiKey, + conversations, + pluginKeys, + selectedConversation, + stopConversationRef, + ], + ); + + const scrollToBottom = useCallback(() => { + if (autoScrollEnabled) { + messagesEndRef.current?.scrollIntoView({ behavior: 'smooth' }); + textareaRef.current?.focus(); + } + }, [autoScrollEnabled]); + + const handleScroll = () => { + if (chatContainerRef.current) { + const { scrollTop, scrollHeight, clientHeight } = + chatContainerRef.current; + const bottomTolerance = 30; + + if (scrollTop + clientHeight < scrollHeight - bottomTolerance) { + setAutoScrollEnabled(false); + setShowScrollDownButton(true); + } else { + setAutoScrollEnabled(true); + 
setShowScrollDownButton(false); + } + } + }; + + const handleScrollDown = () => { + chatContainerRef.current?.scrollTo({ + top: chatContainerRef.current.scrollHeight, + behavior: 'smooth', + }); + }; + + const handleSettings = () => { + setShowSettings(!showSettings); + }; + + const onClearAll = () => { + if ( + confirm(t('Are you sure you want to clear all messages?')) && + selectedConversation + ) { + handleUpdateConversation(selectedConversation, { + key: 'messages', + value: [], + }); + } + }; + + const scrollDown = () => { + if (autoScrollEnabled) { + messagesEndRef.current?.scrollIntoView(true); + } + }; + const throttledScrollDown = throttle(scrollDown, 250); + + // useEffect(() => { + // console.log('currentMessage', currentMessage); + // if (currentMessage) { + // handleSend(currentMessage); + // homeDispatch({ field: 'currentMessage', value: undefined }); + // } + // }, [currentMessage]); + + useEffect(() => { + throttledScrollDown(); + selectedConversation && + setCurrentMessage( + selectedConversation.messages[selectedConversation.messages.length - 2], + ); + }, [selectedConversation, throttledScrollDown]); + + useEffect(() => { + const observer = new IntersectionObserver( + ([entry]) => { + setAutoScrollEnabled(entry.isIntersecting); + if (entry.isIntersecting) { + textareaRef.current?.focus(); + } + }, + { + root: null, + threshold: 0.5, + }, + ); + const messagesEndElement = messagesEndRef.current; + if (messagesEndElement) { + observer.observe(messagesEndElement); + } + return () => { + if (messagesEndElement) { + observer.unobserve(messagesEndElement); + } + }; + }, [messagesEndRef]); + + return ( +
+ {!(apiKey || serverSideApiKeyIsSet) ? ( +
+
+ Welcome to Chatbot UI +
+
+
{`Chatbot UI is an open source clone of OpenAI's ChatGPT UI.`}
+
+ Important: Chatbot UI is 100% unaffiliated with OpenAI. +
+
+
+
+ Chatbot UI allows you to plug in your base url to use this UI with + your API. +
+
+ It is only used to communicate + with your API. +
+
+
+ ) : modelError ? ( + + ) : ( + <> +
+ {selectedConversation?.messages.length === 0 ? ( + <> +
+
+ Chatbot UI +
+ + {models.length > 0 && ( +
+ + + + handleUpdateConversation(selectedConversation, { + key: 'prompt', + value: prompt, + }) + } + /> + + + handleUpdateConversation(selectedConversation, { + key: 'temperature', + value: temperature, + }) + } + /> +
+ )} +
+ + ) : ( + <> +
+ + +
+ {showSettings && ( +
+
+ +
+
+ )} + + {selectedConversation?.messages.map((message, index) => ( + { + setCurrentMessage(editedMessage); + // discard edited message and the ones that come after then resend + handleSend( + editedMessage, + selectedConversation?.messages.length - index, + ); + }} + /> + ))} + + {loading && } + +
+ + )} +
+ + { + setCurrentMessage(message); + handleSend(message, 0, plugin); + }} + onScrollDownClick={handleScrollDown} + onRegenerate={() => { + if (currentMessage) { + handleSend(currentMessage, 2, null); + } + }} + showScrollDownButton={showScrollDownButton} + /> + + )} +
+ ); +}); +Chat.displayName = 'Chat'; diff --git a/components/Chat/ChatInput.tsx b/components/Chat/ChatInput.tsx new file mode 100644 index 0000000000000000000000000000000000000000..bb599e8079f38a8af62e293e86847385e73f973f --- /dev/null +++ b/components/Chat/ChatInput.tsx @@ -0,0 +1,394 @@ +import { + IconArrowDown, + IconBolt, + IconBrandGoogle, + IconPlayerStop, + IconRepeat, + IconSend, +} from '@tabler/icons-react'; +import { + KeyboardEvent, + MutableRefObject, + useCallback, + useContext, + useEffect, + useRef, + useState, +} from 'react'; + +import { useTranslation } from 'next-i18next'; + +import { Message } from '@/types/chat'; +import { Plugin } from '@/types/plugin'; +import { Prompt } from '@/types/prompt'; + +import HomeContext from '@/pages/api/home/home.context'; + +import { PluginSelect } from './PluginSelect'; +import { PromptList } from './PromptList'; +import { VariableModal } from './VariableModal'; + +interface Props { + onSend: (message: Message, plugin: Plugin | null) => void; + onRegenerate: () => void; + onScrollDownClick: () => void; + stopConversationRef: MutableRefObject; + textareaRef: MutableRefObject; + showScrollDownButton: boolean; +} + +export const ChatInput = ({ + onSend, + onRegenerate, + onScrollDownClick, + stopConversationRef, + textareaRef, + showScrollDownButton, +}: Props) => { + const { t } = useTranslation('chat'); + + const { + state: { selectedConversation, messageIsStreaming, prompts }, + + dispatch: homeDispatch, + } = useContext(HomeContext); + + const [content, setContent] = useState(); + const [isTyping, setIsTyping] = useState(false); + const [showPromptList, setShowPromptList] = useState(false); + const [activePromptIndex, setActivePromptIndex] = useState(0); + const [promptInputValue, setPromptInputValue] = useState(''); + const [variables, setVariables] = useState([]); + const [isModalVisible, setIsModalVisible] = useState(false); + const [showPluginSelect, setShowPluginSelect] = useState(false); + const 
[plugin, setPlugin] = useState<Plugin | null>(null);
+
+  const promptListRef = useRef<HTMLUListElement | null>(null);
+
+  // Prompts whose names match the text typed after the "/" trigger.
+  const filteredPrompts = prompts.filter((prompt) =>
+    prompt.name.toLowerCase().includes(promptInputValue.toLowerCase()),
+  );
+
+  // Syncs the textarea state and enforces the model's character limit.
+  const handleChange = (e: React.ChangeEvent<HTMLTextAreaElement>) => {
+    const value = e.target.value;
+    const maxLength = selectedConversation?.model.maxLength;
+
+    if (maxLength && value.length > maxLength) {
+      alert(
+        t(
+          `Message limit is {{maxLength}} characters. You have entered {{valueLength}} characters.`,
+          { maxLength, valueLength: value.length },
+        ),
+      );
+      return;
+    }
+
+    setContent(value);
+    updatePromptListVisibility(value);
+  };
+
+  // Sends the current content as a user message, then resets the input state.
+  const handleSend = () => {
+    if (messageIsStreaming) {
+      return;
+    }
+
+    if (!content) {
+      alert(t('Please enter a message'));
+      return;
+    }
+
+    onSend({ role: 'user', content }, plugin);
+    setContent('');
+    setPlugin(null);
+
+    // On small screens, dismiss the on-screen keyboard after sending.
+    if (window.innerWidth < 640 && textareaRef && textareaRef.current) {
+      textareaRef.current.blur();
+    }
+  };
+
+  // Signals the streaming loop to stop; the flag auto-clears after 1s.
+  const handleStopConversation = () => {
+    stopConversationRef.current = true;
+    setTimeout(() => {
+      stopConversationRef.current = false;
+    }, 1000);
+  };
+
+  // User-agent heuristic; on mobile, Enter inserts a newline instead of sending.
+  const isMobile = () => {
+    const userAgent =
+      typeof window.navigator === 'undefined' ? '' : navigator.userAgent;
+    const mobileRegex =
+      /Android|webOS|iPhone|iPad|iPod|BlackBerry|IEMobile|Opera Mini|Mobile|mobile|CriOS/i;
+    return mobileRegex.test(userAgent);
+  };
+
+  // Applies the highlighted prompt: replaces the trailing "/query" trigger with
+  // the prompt body, then lets handlePromptSelect decide whether the variable
+  // modal is needed.
+  const handleInitModal = () => {
+    const selectedPrompt = filteredPrompts[activePromptIndex];
+    if (selectedPrompt) {
+      setContent((prevContent) => {
+        const newContent = prevContent?.replace(
+          /\/\w*$/,
+          selectedPrompt.content,
+        );
+        return newContent;
+      });
+      handlePromptSelect(selectedPrompt);
+    }
+    setShowPromptList(false);
+  };
+
+  const handleKeyDown = (e: KeyboardEvent<HTMLTextAreaElement>) => {
+    if (showPromptList) {
+      // FIX: navigation bounds must come from the *filtered* list the user is
+      // looking at, not the full prompt list; otherwise the active index can
+      // run past the end of the visible results when a filter is applied.
+      if (e.key === 'ArrowDown') {
+        e.preventDefault();
+        setActivePromptIndex((prevIndex) =>
+          prevIndex < filteredPrompts.length - 1 ? prevIndex + 1 : prevIndex,
+        );
+      } else if (e.key === 'ArrowUp') {
+        e.preventDefault();
+        setActivePromptIndex((prevIndex) =>
+          prevIndex > 0 ? prevIndex - 1 : prevIndex,
+        );
+      } else if (e.key === 'Tab') {
+        e.preventDefault();
+        // Tab wraps around to the top of the filtered list.
+        setActivePromptIndex((prevIndex) =>
+          prevIndex < filteredPrompts.length - 1 ? prevIndex + 1 : 0,
+        );
+      } else if (e.key === 'Enter') {
+        e.preventDefault();
+        handleInitModal();
+      } else if (e.key === 'Escape') {
+        e.preventDefault();
+        setShowPromptList(false);
+      } else {
+        setActivePromptIndex(0);
+      }
+    } else if (e.key === 'Enter' && !isTyping && !isMobile() && !e.shiftKey) {
+      // Plain Enter sends on desktop; Shift+Enter and IME composition
+      // (tracked via isTyping) fall through to insert a newline.
+      e.preventDefault();
+      handleSend();
+    } else if (e.key === '/' && e.metaKey) {
+      e.preventDefault();
+      setShowPluginSelect(!showPluginSelect);
+    }
+  };
+
+  // Extracts the names of {{variable}} placeholders from a prompt template.
+  const parseVariables = (content: string) => {
+    const regex = /{{(.*?)}}/g;
+    const foundVariables: string[] = [];
+    let match;
+
+    while ((match = regex.exec(content)) !== null) {
+      foundVariables.push(match[1]);
+    }
+
+    return foundVariables;
+  };
+
+  // Shows the prompt list while the caret sits on a trailing "/query" trigger.
+  const updatePromptListVisibility = useCallback((text: string) => {
+    const match = text.match(/\/\w*$/);
+
+    if (match) {
+      setShowPromptList(true);
+      setPromptInputValue(match[0].slice(1));
+    } else {
+      setShowPromptList(false);
+      setPromptInputValue('');
+    }
+  }, []);
+
+  // If the prompt has {{variables}}, open the modal to collect values;
+  // otherwise splice the prompt body straight into the textarea.
+  const handlePromptSelect = (prompt: Prompt) => {
+    const parsedVariables = parseVariables(prompt.content);
+    setVariables(parsedVariables);
+
+    if (parsedVariables.length > 0) {
+      setIsModalVisible(true);
+    } else {
+      setContent((prevContent) => {
+        const updatedContent = prevContent?.replace(/\/\w*$/, prompt.content);
+        return updatedContent;
+      });
+      updatePromptListVisibility(prompt.content);
+    }
+  };
+
+  // Fills in {{variable}} slots with the values entered in the modal.
+  const handleSubmit = (updatedVariables: string[]) => {
+    const newContent = content?.replace(/{{(.*?)}}/g, (match, variable) => {
+      const index = variables.indexOf(variable);
+      return updatedVariables[index];
+    });
+
+    setContent(newContent);
+
+    if (textareaRef && textareaRef.current) {
+      textareaRef.current.focus();
+    }
+  };
+
+  // Keep the highlighted prompt visible as the user arrows through the list
+  // (30px per row — assumes a fixed row height; TODO confirm against PromptList).
+  useEffect(() => {
+    if (promptListRef.current) {
+      promptListRef.current.scrollTop = activePromptIndex * 30;
+    }
+  }, [activePromptIndex]);
+
+  // Auto-grow the textarea with its content, switching to internal scrolling
+  // once it exceeds 400px.
+  useEffect(() => {
+    if (textareaRef && textareaRef.current) {
+      textareaRef.current.style.height = 'inherit';
+      textareaRef.current.style.height = `${textareaRef.current?.scrollHeight}px`;
+      textareaRef.current.style.overflow = `${
+        textareaRef?.current?.scrollHeight > 400 ? 'auto' : 'hidden'
+      }`;
+    }
+  }, [content]);
+
+  // Dismiss the prompt list when the user clicks anywhere outside of it.
+  useEffect(() => {
+    const handleOutsideClick = (e: MouseEvent) => {
+      if (
+        promptListRef.current &&
+        !promptListRef.current.contains(e.target as Node)
+      ) {
+        setShowPromptList(false);
+      }
+    };
+
+    window.addEventListener('click', handleOutsideClick);
+
+    return () => {
+      window.removeEventListener('click', handleOutsideClick);
+    };
+  }, []);
+
+  return (
+
+ {messageIsStreaming && ( + + )} + + {!messageIsStreaming && + selectedConversation && + selectedConversation.messages.length > 0 && ( + + )} + +
+ + + {showPluginSelect && ( +
+ { + if (e.key === 'Escape') { + e.preventDefault(); + setShowPluginSelect(false); + textareaRef.current?.focus(); + } + }} + onPluginChange={(plugin: Plugin) => { + setPlugin(plugin); + setShowPluginSelect(false); + + if (textareaRef && textareaRef.current) { + textareaRef.current.focus(); + } + }} + /> +
+ )} + +