Ashhar committed on
Commit · c343876
1 Parent(s): d089666
official commit from code
This view is limited to 50 files because it contains too many changes.
- .env.example +11 -0
- .github/ISSUE_TEMPLATE/bug_report.yml +2 -2
- .github/ISSUE_TEMPLATE/epic.md +23 -0
- .github/ISSUE_TEMPLATE/feature.md +28 -0
- .github/workflows/docker.yaml +5 -0
- CONTRIBUTING.md +2 -2
- PROJECT.md +57 -0
- README.md +41 -25
- app/components/chat/APIKeyManager.tsx +126 -72
- app/components/chat/BaseChat.tsx +28 -56
- app/components/chat/Chat.client.tsx +34 -29
- app/components/chat/chatExportAndImport/ImportButtons.tsx +13 -5
- app/components/git/GitUrlImport.client.tsx +141 -141
- app/components/settings/data/DataTab.tsx +84 -1
- app/components/ui/LoadingOverlay.tsx +32 -32
- app/components/workbench/Preview.tsx +142 -58
- app/lib/.server/llm/stream-text.ts +300 -300
- app/lib/api/cookies.ts +33 -0
- app/lib/hooks/useEditChatDescription.ts +2 -2
- app/lib/hooks/useGit.ts +325 -325
- app/lib/modules/llm/base-provider.ts +1 -1
- app/lib/modules/llm/manager.ts +1 -1
- app/lib/modules/llm/providers/amazon-bedrock.ts +113 -0
- app/lib/modules/llm/providers/anthropic.ts +58 -58
- app/lib/modules/llm/providers/github.ts +53 -0
- app/lib/modules/llm/providers/lmstudio.ts +30 -5
- app/lib/modules/llm/providers/ollama.ts +21 -4
- app/lib/modules/llm/registry.ts +4 -0
- app/lib/stores/previews.ts +260 -0
- app/lib/stores/theme.ts +3 -3
- app/lib/webcontainer/index.ts +1 -0
- app/routes/api.chat.ts +188 -171
- app/routes/api.check-env-key.ts +16 -0
- app/routes/api.enhancer.ts +8 -30
- app/routes/api.git-proxy.$.ts +65 -65
- app/routes/api.llmcall.ts +15 -29
- app/routes/api.models.$provider.ts +2 -0
- app/routes/api.models.ts +81 -3
- app/routes/webcontainer.preview.$id.tsx +92 -0
- app/utils/constants.ts +1 -34
- docs/.gitignore +2 -1
- docs/docs/CONTRIBUTING.md +2 -2
- docs/docs/FAQ.md +66 -62
- docs/docs/index.md +4 -2
- docs/mkdocs.yml +9 -1
- messages.json +0 -0
- package.json +139 -138
- pnpm-lock.yaml +0 -0
- public/icons/AmazonBedrock.svg +1 -0
- vite.config.ts +7 -2
.env.example
CHANGED
@@ -83,6 +83,17 @@ XAI_API_KEY=
 # You only need this environment variable set if you want to use Perplexity models
 PERPLEXITY_API_KEY=
 
+# Get your AWS configuration
+# https://console.aws.amazon.com/iam/home
+# The JSON should include the following keys:
+# - region: The AWS region where Bedrock is available.
+# - accessKeyId: Your AWS access key ID.
+# - secretAccessKey: Your AWS secret access key.
+# - sessionToken (optional): Temporary session token if using an IAM role or temporary credentials.
+# Example JSON:
+# {"region": "us-east-1", "accessKeyId": "yourAccessKeyId", "secretAccessKey": "yourSecretAccessKey", "sessionToken": "yourSessionToken"}
+AWS_BEDROCK_CONFIG=
+
 # Include this environment variable if you want more logging for debugging locally
 VITE_LOG_LEVEL=debug
 
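For orientation, the new `AWS_BEDROCK_CONFIG` variable holds a single JSON blob rather than separate environment variables. Below is a minimal TypeScript sketch of how such a value could be parsed and validated; the `BedrockConfig` shape simply mirrors the keys documented above, while the helper name and error handling are illustrative assumptions, not the provider's actual implementation.

```ts
// Sketch only: parses the AWS_BEDROCK_CONFIG JSON documented in .env.example.
// The BedrockConfig shape mirrors the keys listed there; parseBedrockConfig is
// a hypothetical helper, not part of the codebase.
interface BedrockConfig {
  region: string;
  accessKeyId: string;
  secretAccessKey: string;
  sessionToken?: string; // only needed for temporary credentials
}

function parseBedrockConfig(raw: string | undefined): BedrockConfig | undefined {
  if (!raw) {
    return undefined; // variable not set; treat the provider as unconfigured
  }

  try {
    const parsed = JSON.parse(raw) as Partial<BedrockConfig>;

    if (!parsed.region || !parsed.accessKeyId || !parsed.secretAccessKey) {
      throw new Error('AWS_BEDROCK_CONFIG is missing region, accessKeyId, or secretAccessKey');
    }

    return parsed as BedrockConfig;
  } catch (error) {
    console.error('Invalid AWS_BEDROCK_CONFIG JSON:', error);
    return undefined;
  }
}

// Example usage (Node-style environment access, for illustration):
// const config = parseBedrockConfig(process.env.AWS_BEDROCK_CONFIG);
```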
.github/ISSUE_TEMPLATE/bug_report.yml
CHANGED
@@ -6,8 +6,8 @@ body:
       value: |
         Thank you for reporting an issue :pray:.
 
-        This issue tracker is for bugs and issues found with [Bolt.
-        If you experience issues related to WebContainer, please file an issue in
+        This issue tracker is for bugs and issues found with [Bolt.diy](https://bolt.diy).
+        If you experience issues related to WebContainer, please file an issue in the official [StackBlitz WebContainer repo](https://github.com/stackblitz/webcontainer-core).
 
         The more information you fill in, the better we can help you.
   - type: textarea
.github/ISSUE_TEMPLATE/epic.md
ADDED
@@ -0,0 +1,23 @@
+---
+name: Epic
+about: Epics define long-term vision and capabilities of the software. They will never be finished but serve as umbrella for features.
+title: ''
+labels:
+  - epic
+assignees: ''
+---
+
+# Strategic Impact
+
+<!-- Why does this area matter? How is it integrated into the product or the development process? What would happen if we ignore it? -->
+
+# Target Audience
+
+<!-- Who benefits most from improvements in this area?
+
+Usual values: Software Developers using the IDE | Contributors -->
+
+# Capabilities
+
+<!-- which existing capabilities or future features can be imagined that belong to this epic? This list serves as illustration to sketch the boundaries of this epic.
+Once features are actually being planned / described in detail, they can be linked here. -->
.github/ISSUE_TEMPLATE/feature.md
ADDED
@@ -0,0 +1,28 @@
+---
+name: Feature
+about: A pretty vague description of how a capability of our software can be added or improved.
+title: ''
+labels:
+  - feature
+assignees: ''
+---
+
+# Motivation
+
+<!-- What capability should be either established or improved? How is life of the target audience better after it's been done? -->
+
+# Scope
+
+<!-- This is kind-of the definition-of-done for a feature.
+Try to keep the scope as small as possible and prefer creating multiple, small features which each solve a single problem / make something better
+-->
+
+# Options
+
+<!-- If you already have an idea how this can be implemented, please describe it here.
+This allows potential other contributors to join forces and provide meaningful feedback prio to even starting work on it.
+-->
+
+# Related
+
+<!-- Link to the epic or other issues or PRs which are related to this feature. -->
.github/workflows/docker.yaml
CHANGED
@@ -10,6 +10,10 @@ on:
       - v*
       - "*"
 
+permissions:
+  packages: write
+  contents: read
+
 env:
   REGISTRY: ghcr.io
   DOCKER_IMAGE: ghcr.io/${{ github.repository }}
@@ -61,6 +65,7 @@ jobs:
           context: .
           file: ./Dockerfile
           target: ${{ env.BUILD_TARGET }}
+          platforms: linux/amd64,linux/arm64
          push: true
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
CONTRIBUTING.md
CHANGED
@@ -144,7 +144,7 @@ docker build . --target bolt-ai-development
 
 **Option 3: Docker Compose Profile**
 ```bash
-docker
+docker compose --profile development up
 ```
 
 #### Running the Development Container
@@ -171,7 +171,7 @@ docker build . --target bolt-ai-production
 
 **Option 3: Docker Compose Profile**
 ```bash
-docker
+docker compose --profile production up
 ```
 
 #### Running the Production Container
PROJECT.md
ADDED
@@ -0,0 +1,57 @@
+# Project management of bolt.diy
+
+First off: this sounds funny, we know. "Project management" comes from a world of enterprise stuff and this project is
+far from being enterprisy- it's still anarchy all over the place 😉
+
+But we need to organize ourselves somehow, right?
+
+> tl;dr: We've got a project board with epics and features. We use PRs as change log and as materialized features. Find it [here](https://github.com/orgs/stackblitz-labs/projects/4).
+
+Here's how we structure long-term vision, mid-term capabilities of the software and short term improvements.
+
+## Strategic epics (long-term)
+
+Strategic epics define areas in which the product evolves. Usually, these epics don’t overlap. They shall allow the core
+team to define what they believe is most important and should be worked on with the highest priority.
+
+You can find the [epics as issues](https://github.com/stackblitz-labs/bolt.diy/labels/epic) which are probably never
+going to be closed.
+
+What's the benefit / purpose of epics?
+
+1. Prioritization
+
+E. g. we could say “managing files is currently more important that quality”. Then, we could thing about which features
+would bring “managing files” forward. It may be different features, such as “upload local files”, “import from a repo”
+or also undo/redo/commit.
+
+In a more-or-less regular meeting dedicated for that, the core team discusses which epics matter most, sketch features
+and then check who can work on them. After the meeting, they update the roadmap (at least for the next development turn)
+and this way communicate where the focus currently is.
+
+2. Grouping of features
+
+By linking features with epics, we can keep them together and document *why* we invest work into a particular thing.
+
+## Features (mid-term)
+
+We all know probably a dozen of methodologies following which features are being described (User story, business
+function, you name it).
+
+However, we intentionally describe features in a more vague manner. Why? Everybody loves crisp, well-defined
+acceptance-criteria, no? Well, every product owner loves it. because he knows what he’ll get once it’s done.
+
+But: **here is no owner of this product**. Therefore, we grant *maximum flexibility to the developer contributing a feature* – so that he can bring in his ideas and have most fun implementing it.
+
+The feature therefore tries to describe *what* should be improved but not in detail *how*.
+
+## PRs as materialized features (short-term)
+
+Once a developer starts working on a feature, a draft-PR *can* be opened asap to share, describe and discuss, how the feature shall be implemented. But: this is not a must. It just helps to get early feedback and get other developers involved. Sometimes, the developer just wants to get started and then open a PR later.
+
+In a loosely organized project, it may as well happen that multiple PRs are opened for the same feature. This is no real issue: Usually, peoply being passionate about a solution are willing to join forces and get it done together. And if a second developer was just faster getting the same feature realized: Be happy that it's been done, close the PR and look out for the next feature to implement 🤓
+
+## PRs as change log
+
+Once a PR is merged, a squashed commit contains the whole PR description which allows for a good change log.
+All authors of commits in the PR are mentioned in the squashed commit message and become contributors 🙌
README.md
CHANGED
@@ -1,19 +1,14 @@
----
-title: bolt.diy
-emoji: 📉
-colorFrom: red
-colorTo: red
-sdk: docker
-app_port: 5173
-pinned: false
----
-
 # bolt.diy (Previously oTToDev)
+
 [![bolt.diy: AI-Powered Full-Stack Web Development in the Browser](./public/social_preview_index.jpg)](https://bolt.diy)
 
 Welcome to bolt.diy, the official open source version of Bolt.new (previously known as oTToDev and bolt.new ANY LLM), which allows you to choose the LLM that you use for each prompt! Currently, you can use OpenAI, Anthropic, Ollama, OpenRouter, Gemini, LMStudio, Mistral, xAI, HuggingFace, DeepSeek, or Groq models - and it is easily extended to use any other model supported by the Vercel AI SDK! See the instructions below for running this locally and extending it to include more models.
 
-
+-----
+Check the [bolt.diy Docs](https://stackblitz-labs.github.io/bolt.diy/) for more offical installation instructions and more informations.
+
+-----
+Also [this pinned post in our community](https://thinktank.ottomator.ai/t/videos-tutorial-helpful-content/3243) has a bunch of incredible resources for running and deploying bolt.diy yourself!
 
 We have also launched an experimental agent called the "bolt.diy Expert" that can answer common questions about bolt.diy. Find it here on the [oTTomator Live Agent Studio](https://studio.ottomator.ai/).
 
@@ -33,8 +28,15 @@ bolt.diy was originally started by [Cole Medin](https://www.youtube.com/@ColeMed
 
 ## Join the community
 
-[Join the bolt.diy community here, in the
+[Join the bolt.diy community here, in the oTTomator Think Tank!](https://thinktank.ottomator.ai)
+
+## Project management
 
+Bolt.diy is a community effort! Still, the core team of contributors aims at organizing the project in way that allows
+you to understand where the current areas of focus are.
+
+If you want to know what we are working on, what we are planning to work on, or if you want to contribute to the
+project, please check the [project management guide](./PROJECT.md) to get started easily.
 
 ## Requested Additions
 
@@ -57,6 +59,7 @@ bolt.diy was originally started by [Cole Medin](https://www.youtube.com/@ColeMed
 - ✅ Bolt terminal to see the output of LLM run commands (@thecodacus)
 - ✅ Streaming of code output (@thecodacus)
 - ✅ Ability to revert code to earlier version (@wonderwhy-er)
+- ✅ Chat history backup and restore functionality (@sidbetatester)
 - ✅ Cohere Integration (@hasanraiyan)
 - ✅ Dynamic model max token length (@hasanraiyan)
 - ✅ Better prompt enhancing (@SujalXplores)
@@ -65,7 +68,7 @@ bolt.diy was originally started by [Cole Medin](https://www.youtube.com/@ColeMed
 - ✅ Together Integration (@mouimet-infinisoft)
 - ✅ Mobile friendly (@qwikode)
 - ✅ Better prompt enhancing (@SujalXplores)
-- ✅ Attach images to prompts (@atrokhym)
+- ✅ Attach images to prompts (@atrokhym)(@stijnus)
 - ✅ Added Git Clone button (@thecodacus)
 - ✅ Git Import from url (@thecodacus)
 - ✅ PromptLibrary to have different variations of prompts for different use cases (@thecodacus)
@@ -74,6 +77,8 @@ bolt.diy was originally started by [Cole Medin](https://www.youtube.com/@ColeMed
 - ✅ Detect terminal Errors and ask bolt to fix it (@thecodacus)
 - ✅ Detect preview Errors and ask bolt to fix it (@wonderwhy-er)
 - ✅ Add Starter Template Options (@thecodacus)
+- ✅ Perplexity Integration (@meetpateltech)
+- ✅ AWS Bedrock Integration (@kunjabijukchhe)
 - ⬜ **HIGH PRIORITY** - Prevent bolt from rewriting files as often (file locking and diffs)
 - ⬜ **HIGH PRIORITY** - Better prompting for smaller LLMs (code window sometimes doesn't start)
 - ⬜ **HIGH PRIORITY** - Run agents in the backend as opposed to a single model call
@@ -84,12 +89,14 @@ bolt.diy was originally started by [Cole Medin](https://www.youtube.com/@ColeMed
 - ⬜ Upload documents for knowledge - UI design templates, a code base to reference coding style, etc.
 - ⬜ Voice prompting
 - ⬜ Azure Open AI API Integration
-- ✅ Perplexity Integration (@meetpateltech)
 - ⬜ Vertex AI Integration
+- ⬜ Granite Integration
+- ✅ Popout Window for Web Container(@stijnus)
+- ✅ Ability to change Popout window size (@stijnus)
 
 ## Features
 
-- **AI-powered full-stack web development** directly in your browser.
+- **AI-powered full-stack web development** for **NodeJS based applications** directly in your browser.
 - **Support for multiple LLMs** with an extensible architecture to integrate additional models.
 - **Attach images to prompts** for better contextual understanding.
 - **Integrated terminal** to view output of LLM-run commands.
@@ -97,21 +104,18 @@ bolt.diy was originally started by [Cole Medin](https://www.youtube.com/@ColeMed
 - **Download projects as ZIP** for easy portability.
 - **Integration-ready Docker support** for a hassle-free setup.
 
-## Setup
+## Setup
 
-If you're new to installing software from GitHub, don't worry! If you encounter any issues, feel free to submit an "issue" using the provided links or improve this documentation by forking the repository, editing the instructions, and submitting a pull request. The following instruction will help you get the stable branch up and running on your local machine in no time.
+If you're new to installing software from GitHub, don't worry! If you encounter any issues, feel free to submit an "issue" using the provided links or improve this documentation by forking the repository, editing the instructions, and submitting a pull request. The following instruction will help you get the stable branch up and running on your local machine in no time.
 
 Let's get you up and running with the stable version of Bolt.DIY!
 
 ## Quick Download
 
-[![Download Latest Release](https://img.shields.io/github/v/release/stackblitz-labs/bolt.diy?label=Download%20Bolt&sort=semver)](https://github.com/stackblitz-labs/bolt.diy/releases/latest) ← Click here to go the the latest release version!
+[![Download Latest Release](https://img.shields.io/github/v/release/stackblitz-labs/bolt.diy?label=Download%20Bolt&sort=semver)](https://github.com/stackblitz-labs/bolt.diy/releases/latest) ← Click here to go the the latest release version!
 
 - Next **click source.zip**
 
-
-
-
 ## Prerequisites
 
 Before you begin, you'll need to install two important pieces of software:
@@ -144,16 +148,19 @@ You have two options for running Bolt.DIY: directly on your machine or using Doc
 ### Option 1: Direct Installation (Recommended for Beginners)
 
 1. **Install Package Manager (pnpm)**:
+
 ```bash
 npm install -g pnpm
 ```
 
 2. **Install Project Dependencies**:
+
 ```bash
 pnpm install
 ```
 
 3. **Start the Application**:
+
 ```bash
 pnpm run dev
 ```
@@ -165,11 +172,13 @@ You have two options for running Bolt.DIY: directly on your machine or using Doc
 This option requires some familiarity with Docker but provides a more isolated environment.
 
 #### Additional Prerequisite
+
 - Install Docker: [Download Docker](https://www.docker.com/)
 
 #### Steps:
 
 1. **Build the Docker Image**:
+
 ```bash
 # Using npm script:
 npm run dockerbuild
@@ -180,12 +189,9 @@ This option requires some familiarity with Docker but provides a more isolated e
 
 2. **Run the Container**:
 ```bash
-docker
+docker compose --profile development up
 ```
 
-
-
-
 ## Configuring API Keys and Providers
 
 ### Adding Your API Keys
@@ -214,6 +220,7 @@ For providers that support custom base URLs (such as Ollama or LM Studio), follo
 > **Note**: Custom base URLs are particularly useful when running local instances of AI models or using custom API endpoints.
 
 ### Supported Providers
+
 - Ollama
 - LM Studio
 - OpenAILike
@@ -221,23 +228,27 @@ For providers that support custom base URLs (such as Ollama or LM Studio), follo
 ## Setup Using Git (For Developers only)
 
 This method is recommended for developers who want to:
+
 - Contribute to the project
 - Stay updated with the latest changes
 - Switch between different versions
 - Create custom modifications
 
 #### Prerequisites
+
 1. Install Git: [Download Git](https://git-scm.com/downloads)
 
 #### Initial Setup
 
 1. **Clone the Repository**:
+
 ```bash
 # Using HTTPS
 git clone https://github.com/stackblitz-labs/bolt.diy.git
 ```
 
 2. **Navigate to Project Directory**:
+
 ```bash
 cd bolt.diy
 ```
@@ -247,6 +258,7 @@ This method is recommended for developers who want to:
 git checkout main
 ```
 4. **Install Dependencies**:
+
 ```bash
 pnpm install
 ```
@@ -261,16 +273,19 @@ This method is recommended for developers who want to:
 To get the latest changes from the repository:
 
 1. **Save Your Local Changes** (if any):
+
 ```bash
 git stash
 ```
 
 2. **Pull Latest Updates**:
+
 ```bash
 git pull origin main
 ```
 
 3. **Update Dependencies**:
+
 ```bash
 pnpm install
 ```
@@ -285,6 +300,7 @@ To get the latest changes from the repository:
 If you encounter issues:
 
 1. **Clean Installation**:
+
 ```bash
 # Remove node modules and lock files
 rm -rf node_modules pnpm-lock.yaml
app/components/chat/APIKeyManager.tsx
CHANGED
@@ -1,9 +1,8 @@
-import React, { useState, useEffect } from 'react';
+import React, { useState, useEffect, useCallback } from 'react';
 import { IconButton } from '~/components/ui/IconButton';
 import { Switch } from '~/components/ui/Switch';
 import type { ProviderInfo } from '~/types/model';
 import Cookies from 'js-cookie';
-
 interface APIKeyManagerProps {
   provider: ProviderInfo;
   apiKey: string;
@@ -12,11 +11,14 @@ interface APIKeyManagerProps {
   labelForGetApiKey?: string;
 }
 
+// cache which stores whether the provider's API key is set via environment variable
+const providerEnvKeyStatusCache: Record<string, boolean> = {};
+
 const apiKeyMemoizeCache: { [k: string]: Record<string, string> } = {};
 
 export function getApiKeysFromCookies() {
   const storedApiKeys = Cookies.get('apiKeys');
-  let parsedKeys = {};
+  let parsedKeys: Record<string, string> = {};
 
   if (storedApiKeys) {
     parsedKeys = apiKeyMemoizeCache[storedApiKeys];
@@ -32,99 +34,151 @@ export function getApiKeysFromCookies() {
 // eslint-disable-next-line @typescript-eslint/naming-convention
 export const APIKeyManager: React.FC<APIKeyManagerProps> = ({ provider, apiKey, setApiKey }) => {
   const [isEditing, setIsEditing] = useState(false);
-  const [tempKey, setTempKey] = useState(apiKey
+  const [tempKey, setTempKey] = useState(apiKey);
   const [isPromptCachingEnabled, setIsPromptCachingEnabled] = useState(() => {
     // Read initial state from localStorage, defaulting to true
     const savedState = localStorage.getItem('PROMPT_CACHING_ENABLED');
     return savedState !== null ? JSON.parse(savedState) : true;
   });
+  const [isEnvKeySet, setIsEnvKeySet] = useState(false);
 
-
-
+  useEffect(() => {
+    // Update localStorage whenever the prompt caching state changes
+    localStorage.setItem('PROMPT_CACHING_ENABLED', JSON.stringify(isPromptCachingEnabled));
+  }, [isPromptCachingEnabled]);
 
-
-
+  // Reset states and load saved key when provider changes
+  useEffect(() => {
+    // Load saved API key from cookies for this provider
+    const savedKeys = getApiKeysFromCookies();
+    const savedKey = savedKeys[provider.name] || '';
 
-
-
-
-
+    setTempKey(savedKey);
+    setApiKey(savedKey);
+    setIsEditing(false);
+  }, [provider.name]);
 
-
-
-
-
+  const checkEnvApiKey = useCallback(async () => {
+    // Check cache first
+    if (providerEnvKeyStatusCache[provider.name] !== undefined) {
+      setIsEnvKeySet(providerEnvKeyStatusCache[provider.name]);
+      return;
+    }
 
-
-
+    try {
+      const response = await fetch(`/api/check-env-key?provider=${encodeURIComponent(provider.name)}`);
+      const data = await response.json();
+      const isSet = (data as { isSet: boolean }).isSet;
+
+      // Cache the result
+      providerEnvKeyStatusCache[provider.name] = isSet;
+      setIsEnvKeySet(isSet);
+    } catch (error) {
+      console.error('Failed to check environment API key:', error);
+      setIsEnvKeySet(false);
+    }
+  }, [provider.name]);
 
-
-
+  useEffect(() => {
+    checkEnvApiKey();
+  }, [checkEnvApiKey]);
 
   const handleSave = () => {
+    // Save to parent state
     setApiKey(tempKey);
-    setIsEditing(false);
-    cachedApiKeysOps().save(tempKey);
-  };
 
-
-
-
-
-  }, [tempKey]);
+    // Save to cookies
+    const currentKeys = getApiKeysFromCookies();
+    const newKeys = { ...currentKeys, [provider.name]: tempKey };
+    Cookies.set('apiKeys', JSON.stringify(newKeys));
 
-
-
-    localStorage.setItem('PROMPT_CACHING_ENABLED', JSON.stringify(isPromptCachingEnabled));
-  }, [isPromptCachingEnabled]);
+    setIsEditing(false);
+  };
 
   return (
-    <div className="
-    <div className="flex
-    <div>
-    <
-
-
-    <
-    {apiKey ?
-
-
-
+    <div className="flex flex-col items-left justify-between py-3 px-1">
+      <div className="flex">
+        <div className="flex items-center gap-2 flex-1">
+          <div className="flex items-center gap-2">
+            <span className="text-sm font-medium text-bolt-elements-textSecondary">{provider?.name} API Key:</span>
+            {!isEditing && (
+              <div className="flex items-center gap-2">
+                {apiKey ? (
+                  <>
+                    <div className="i-ph:check-circle-fill text-green-500 w-4 h-4" />
+                    <span className="text-xs text-green-500">Set via UI</span>
+                  </>
+                ) : isEnvKeySet ? (
+                  <>
+                    <div className="i-ph:check-circle-fill text-green-500 w-4 h-4" />
+                    <span className="text-xs text-green-500">Set via environment variable</span>
+                  </>
+                ) : (
+                  <>
+                    <div className="i-ph:x-circle-fill text-red-500 w-4 h-4" />
+                    <span className="text-xs text-red-500">Not Set (Please set via UI or ENV_VAR)</span>
+                  </>
+                )}
+              </div>
+            )}
+          </div>
+        </div>
+
+        <div className="flex items-center gap-2 shrink-0">
+          {isEditing ? (
+            <div className="flex items-center gap-2">
+              <input
+                type="password"
+                value={tempKey}
+                placeholder="Enter API Key"
+                onChange={(e) => setTempKey(e.target.value)}
+                className="w-[300px] px-3 py-1.5 text-sm rounded border border-bolt-elements-borderColor
+                  bg-bolt-elements-prompt-background text-bolt-elements-textPrimary
+                  focus:outline-none focus:ring-2 focus:ring-bolt-elements-focus"
+              />
+              <IconButton
+                onClick={handleSave}
+                title="Save API Key"
+                className="bg-green-500/10 hover:bg-green-500/20 text-green-500"
+              >
+                <div className="i-ph:check w-4 h-4" />
+              </IconButton>
+              <IconButton
+                onClick={() => setIsEditing(false)}
+                title="Cancel"
+                className="bg-red-500/10 hover:bg-red-500/20 text-red-500"
+              >
+                <div className="i-ph:x w-4 h-4" />
              </IconButton>
            </div>
+          ) : (
+            <>
+              {
+                <IconButton
+                  onClick={() => setIsEditing(true)}
+                  title="Edit API Key"
+                  className="bg-blue-500/10 hover:bg-blue-500/20 text-blue-500"
+                >
+                  <div className="i-ph:pencil-simple w-4 h-4" />
+                </IconButton>
+              }
+              {provider?.getApiKeyLink && !apiKey && (
+                <IconButton
+                  onClick={() => window.open(provider?.getApiKeyLink)}
+                  title="Get API Key"
+                  className="bg-purple-500/10 hover:bg-purple-500/20 text-purple-500 flex items-center gap-2"
+                >
+                  <span className="text-xs whitespace-nowrap">{provider?.labelForGetApiKey || 'Get API Key'}</span>
+                  <div className={`${provider?.icon || 'i-ph:key'} w-4 h-4`} />
+                </IconButton>
+              )}
+            </>
          )}
        </div>
-
-    {isEditing ? (
-      <div className="flex items-center gap-3 mt-2">
-        <input
-          type="password"
-          value={tempKey}
-          placeholder="Your API Key"
-          onChange={(e) => setTempKey(e.target.value)}
-          className="flex-1 px-2 py-1 text-xs lg:text-sm rounded border border-bolt-elements-borderColor bg-bolt-elements-prompt-background text-bolt-elements-textPrimary focus:outline-none focus:ring-2 focus:ring-bolt-elements-focus"
-        />
-        <IconButton onClick={handleSave} title="Save API Key">
-          <div className="i-ph:check" />
-        </IconButton>
-        <IconButton onClick={() => setIsEditing(false)} title="Cancel">
-          <div className="i-ph:x" />
-        </IconButton>
-      </div>
-    ) : (
-      <>
-        {provider?.getApiKeyLink && (
-          <IconButton className="ml-auto" onClick={() => window.open(provider?.getApiKeyLink)} title="Edit API Key">
-            <span className="mr-2 text-xs lg:text-sm">{provider?.labelForGetApiKey || 'Get API Key'}</span>
-            <div className={provider?.icon || 'i-ph:key'} />
-          </IconButton>
-        )}
-      </>
-    )}
      </div>
 
      {provider?.name === 'Anthropic' && (
-        <div className="border-t pt-4 pb-
+        <div className="border-t mt-4 pt-4 pb-2 -mt-4">
          <div className="flex items-center space-x-2">
            <Switch checked={isPromptCachingEnabled} onCheckedChange={setIsPromptCachingEnabled} />
            <label htmlFor="prompt-caching" className="text-sm text-bolt-elements-textSecondary">
@@ -132,7 +186,7 @@ export const APIKeyManager: React.FC<APIKeyManagerProps> = ({ provider, apiKey,
            </label>
          </div>
          <p className="text-xs text-bolt-elements-textTertiary mt-2">
-            When enabled,
+            When enabled, generates 10x cheaper responses if re-prompted within 5 mins (Recommended)
          </p>
        </div>
      )}
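The key addition in this file is the environment-key check: the component now asks the server whether a provider's key is already configured via an environment variable and caches the answer per provider. The sketch below isolates that pattern in plain TypeScript; the `/api/check-env-key` endpoint and the `{ isSet }` response shape come from the diff, while the helper name and surrounding structure are illustrative rather than the component's actual code.

```ts
// Sketch of the env-key check, cached so the round trip happens at most once
// per provider per session. isProviderKeySetInEnv is a hypothetical helper.
const envKeyStatusCache: Record<string, boolean> = {};

async function isProviderKeySetInEnv(providerName: string): Promise<boolean> {
  // Check cache first so repeated provider switches don't refetch.
  if (envKeyStatusCache[providerName] !== undefined) {
    return envKeyStatusCache[providerName];
  }

  try {
    const response = await fetch(`/api/check-env-key?provider=${encodeURIComponent(providerName)}`);
    const { isSet } = (await response.json()) as { isSet: boolean };

    envKeyStatusCache[providerName] = isSet;

    return isSet;
  } catch (error) {
    // Treat a failed check as "not set" so the UI still prompts for a key.
    console.error('Failed to check environment API key:', error);
    return false;
  }
}
```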
app/components/chat/BaseChat.tsx
CHANGED
@@ -3,13 +3,13 @@
  * Preventing TS checks with files presented in the video for a better presentation.
  */
 import type { Message } from 'ai';
-import React, { type RefCallback,
+import React, { type RefCallback, useEffect, useState } from 'react';
 import { ClientOnly } from 'remix-utils/client-only';
 import { Menu } from '~/components/sidebar/Menu.client';
 import { IconButton } from '~/components/ui/IconButton';
 import { Workbench } from '~/components/workbench/Workbench.client';
 import { classNames } from '~/utils/classNames';
-import {
+import { PROVIDER_LIST } from '~/utils/constants';
 import { Messages } from './Messages.client';
 import { SendButton } from './SendButton.client';
 import { APIKeyManager, getApiKeysFromCookies } from './APIKeyManager';
@@ -25,13 +25,13 @@ import GitCloneButton from './GitCloneButton';
 import FilePreview from './FilePreview';
 import { ModelSelector } from '~/components/chat/ModelSelector';
 import { SpeechRecognitionButton } from '~/components/chat/SpeechRecognition';
-import type {
+import type { ProviderInfo } from '~/types/model';
 import { ScreenshotStateManager } from './ScreenshotStateManager';
 import { toast } from 'react-toastify';
 import StarterTemplates from './StarterTemplates';
 import type { ActionAlert } from '~/types/actions';
 import ChatAlert from './ChatAlert';
-import {
+import type { ModelInfo } from '~/lib/modules/llm/types';
 
 const TEXTAREA_MIN_HEIGHT = 76;
 
@@ -102,35 +102,13 @@ export const BaseChat = React.forwardRef<HTMLDivElement, BaseChatProps>(
   ) => {
     const TEXTAREA_MAX_HEIGHT = chatStarted ? 400 : 200;
     const [apiKeys, setApiKeys] = useState<Record<string, string>>(getApiKeysFromCookies());
-    const [modelList, setModelList] = useState(
+    const [modelList, setModelList] = useState<ModelInfo[]>([]);
     const [isModelSettingsCollapsed, setIsModelSettingsCollapsed] = useState(false);
     const [isListening, setIsListening] = useState(false);
     const [recognition, setRecognition] = useState<SpeechRecognition | null>(null);
     const [transcript, setTranscript] = useState('');
     const [isModelLoading, setIsModelLoading] = useState<string | undefined>('all');
 
-    const getProviderSettings = useCallback(() => {
-      let providerSettings: Record<string, IProviderSetting> | undefined = undefined;
-
-      try {
-        const savedProviderSettings = Cookies.get('providers');
-
-        if (savedProviderSettings) {
-          const parsedProviderSettings = JSON.parse(savedProviderSettings);
-
-          if (typeof parsedProviderSettings === 'object' && parsedProviderSettings !== null) {
-            providerSettings = parsedProviderSettings;
-          }
-        }
-      } catch (error) {
-        console.error('Error loading Provider Settings from cookies:', error);
-
-        // Clear invalid cookie data
-        Cookies.remove('providers');
-      }
-
-      return providerSettings;
-    }, []);
     useEffect(() => {
       console.log(transcript);
     }, [transcript]);
@@ -169,7 +147,6 @@ export const BaseChat = React.forwardRef<HTMLDivElement, BaseChatProps>(
 
     useEffect(() => {
      if (typeof window !== 'undefined') {
-        const providerSettings = getProviderSettings();
        let parsedApiKeys: Record<string, string> | undefined = {};
 
        try {
@@ -177,53 +154,48 @@ export const BaseChat = React.forwardRef<HTMLDivElement, BaseChatProps>(
          setApiKeys(parsedApiKeys);
        } catch (error) {
          console.error('Error loading API keys from cookies:', error);
-
-          // Clear invalid cookie data
          Cookies.remove('apiKeys');
        }
+
        setIsModelLoading('all');
-
-          .then((
-
-
+        fetch('/api/models')
+          .then((response) => response.json())
+          .then((data) => {
+            const typedData = data as { modelList: ModelInfo[] };
+            setModelList(typedData.modelList);
          })
          .catch((error) => {
-            console.error('Error
+            console.error('Error fetching model list:', error);
          })
          .finally(() => {
            setIsModelLoading(undefined);
          });
      }
-    }, [providerList]);
+    }, [providerList, provider]);
 
    const onApiKeysChange = async (providerName: string, apiKey: string) => {
      const newApiKeys = { ...apiKeys, [providerName]: apiKey };
      setApiKeys(newApiKeys);
      Cookies.set('apiKeys', JSON.stringify(newApiKeys));
 
-
+      setIsModelLoading(providerName);
 
-
-      setIsModelLoading(providerName);
+      let providerModels: ModelInfo[] = [];
 
-
-
-
-
-
-
-        import.meta.env || process.env || {},
-      );
-
-      setModelList((preModels) => {
-        const filteredOutPreModels = preModels.filter((x) => x.provider !== providerName);
-        return [...filteredOutPreModels, ...staticModels, ...dynamicModels];
-      });
-      } catch (error) {
-        console.error('Error loading dynamic models:', error);
-      }
-      setIsModelLoading(undefined);
+      try {
+        const response = await fetch(`/api/models/${encodeURIComponent(providerName)}`);
+        const data = await response.json();
+        providerModels = (data as { modelList: ModelInfo[] }).modelList;
+      } catch (error) {
+        console.error('Error loading dynamic models for:', providerName, error);
      }
+
+      // Only update models for the specific provider
+      setModelList((prevModels) => {
+        const otherModels = prevModels.filter((model) => model.provider !== providerName);
+        return [...otherModels, ...providerModels];
+      });
+      setIsModelLoading(undefined);
    };
 
    const startListening = () => {
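This change moves model discovery from in-component provider logic to two HTTP endpoints: the full catalog comes from `GET /api/models`, and a single provider can be refreshed from `GET /api/models/:provider` after its API key changes. A minimal TypeScript sketch of that flow, assuming the `{ modelList }` response shape shown in the diff and treating the helper names as illustrative:

```ts
// Sketch of the new client-side model loading flow. ModelInfo is the type
// imported in the diff; fetchAllModels/refreshProviderModels are hypothetical
// helpers that mirror the component's fetch logic.
import type { ModelInfo } from '~/lib/modules/llm/types';

async function fetchAllModels(): Promise<ModelInfo[]> {
  const response = await fetch('/api/models');
  const data = (await response.json()) as { modelList: ModelInfo[] };

  return data.modelList;
}

async function refreshProviderModels(current: ModelInfo[], providerName: string): Promise<ModelInfo[]> {
  try {
    const response = await fetch(`/api/models/${encodeURIComponent(providerName)}`);
    const data = (await response.json()) as { modelList: ModelInfo[] };

    // Keep models from other providers untouched; swap in the refreshed set.
    return [...current.filter((model) => model.provider !== providerName), ...data.modelList];
  } catch (error) {
    console.error('Error loading dynamic models for:', providerName, error);
    return current;
  }
}
```

The per-provider refresh keeps the rest of the list intact, which is why the component filters out only the matching provider before merging the new results.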
app/components/chat/Chat.client.tsx
CHANGED
@@ -156,36 +156,37 @@ export const ChatImpl = memo(
 
     const [apiKeys, setApiKeys] = useState<Record<string, string>>({});
 
-    const { messages, isLoading, input, handleInputChange, setInput, stop, append, setMessages, reload } =
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-      if (usage) {
-        console.log('Token usage:', usage);
-
-        // You can now use the usage data as needed
-      }
+    const { messages, isLoading, input, handleInputChange, setInput, stop, append, setMessages, reload, error } =
+      useChat({
+        api: '/api/chat',
+        body: {
+          apiKeys,
+          files,
+          promptId,
+          contextOptimization: contextOptimizationEnabled,
+          isPromptCachingEnabled: provider.name === 'Anthropic' && isPromptCachingEnabled(),
+        },
+        sendExtraMessageFields: true,
+        onError: (e) => {
+          logger.error('Request failed\n\n', e, error);
+          toast.error(
+            'There was an error processing your request: ' + (e.message ? e.message : 'No details were returned'),
+          );
+        },
+        onFinish: (message, response) => {
+          const usage = response.usage;
 
-
-
-
-
-
+          if (usage) {
+            console.log('Token usage:', usage);
+
+            // You can now use the usage data as needed
+          }
+
+          logger.debug('Finished streaming');
+        },
+        initialMessages,
+        initialInput: Cookies.get(PROMPT_COOKIE_KEY) || '',
+      });
     useEffect(() => {
       const prompt = searchParams.get('prompt');
 
@@ -283,6 +284,10 @@
      */
     await workbenchStore.saveAllFiles();
 
+    if (error != null) {
+      setMessages(messages.slice(0, -1));
+    }
+
     const fileModifications = workbenchStore.getFileModifcations();
 
     chatStore.setKey('aborted', false);
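The second hunk adds a small recovery step: if the previous request errored, the trailing message is dropped before a new one is sent, so the failed exchange is not resent. A minimal sketch of that idea as a standalone helper, assuming the `Message` type from the `ai` package; the function name is illustrative and not part of the component:

```ts
// Sketch: drop the trailing message when the previous request errored,
// mirroring the `setMessages(messages.slice(0, -1))` added in Chat.client.tsx.
import type { Message } from 'ai';

function withoutFailedTail(messages: Message[], hadError: boolean): Message[] {
  return hadError ? messages.slice(0, -1) : messages;
}

// Example: const cleaned = withoutFailedTail(messages, error != null);
```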
app/components/chat/chatExportAndImport/ImportButtons.tsx
CHANGED
@@ -2,6 +2,11 @@ import type { Message } from 'ai';
 import { toast } from 'react-toastify';
 import { ImportFolderButton } from '~/components/chat/ImportFolderButton';
 
+type ChatData = {
+  messages?: Message[]; // Standard Bolt format
+  description?: string; // Optional description
+};
+
 export function ImportButtons(importChat: ((description: string, messages: Message[]) => Promise<void>) | undefined) {
   return (
     <div className="flex flex-col items-center justify-center w-auto">
@@ -20,14 +25,17 @@ export function ImportButtons(importChat: ((description: string, messages: Messa
       reader.onload = async (e) => {
         try {
           const content = e.target?.result as string;
-          const data = JSON.parse(content);
+          const data = JSON.parse(content) as ChatData;
+
+          // Standard format
+          if (Array.isArray(data.messages)) {
+            await importChat(data.description || 'Imported Chat', data.messages);
+            toast.success('Chat imported successfully');
 
-
-          toast.error('Invalid chat file format');
+            return;
           }
 
-
-          toast.success('Chat imported successfully');
+          toast.error('Invalid chat file format');
         } catch (error: unknown) {
           if (error instanceof Error) {
             toast.error('Failed to parse chat file: ' + error.message);
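The import path now validates the uploaded JSON against the `ChatData` shape before calling `importChat`, instead of importing blindly. A small TypeScript sketch of that validation as a pure function, using the `ChatData` type from the diff; the function name and return shape are illustrative assumptions:

```ts
// Sketch of the import-file validation: parse the JSON and require a
// messages array, falling back to a default description.
import type { Message } from 'ai';

type ChatData = {
  messages?: Message[]; // Standard Bolt format
  description?: string; // Optional description
};

function parseChatExport(content: string): { description: string; messages: Message[] } | undefined {
  const data = JSON.parse(content) as ChatData;

  if (!Array.isArray(data.messages)) {
    return undefined; // caller reports "Invalid chat file format"
  }

  return { description: data.description || 'Imported Chat', messages: data.messages };
}
```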
app/components/git/GitUrlImport.client.tsx
CHANGED
@@ -1,141 +1,141 @@
-import { useSearchParams } from '@remix-run/react';
-import { generateId, type Message } from 'ai';
-import ignore from 'ignore';
-import { useEffect, useState } from 'react';
-import { ClientOnly } from 'remix-utils/client-only';
-import { BaseChat } from '~/components/chat/BaseChat';
-import { Chat } from '~/components/chat/Chat.client';
-import { useGit } from '~/lib/hooks/useGit';
-import { useChatHistory } from '~/lib/persistence';
-import { createCommandsMessage, detectProjectCommands } from '~/utils/projectCommands';
-import { LoadingOverlay } from '~/components/ui/LoadingOverlay';
-import { toast } from 'react-toastify';
-
-const IGNORE_PATTERNS = [
-  'node_modules/**',
-  '.git/**',
-  '.github/**',
-  '.vscode/**',
-  '**/*.jpg',
-  '**/*.jpeg',
-  '**/*.png',
-  'dist/**',
-  'build/**',
-  '.next/**',
-  'coverage/**',
-  '.cache/**',
-  '.vscode/**',
-  '.idea/**',
-  '**/*.log',
-  '**/.DS_Store',
-  '**/npm-debug.log*',
-  '**/yarn-debug.log*',
-  '**/yarn-error.log*',
-  '**/*lock.json',
-  '**/*lock.yaml',
-];
-
-export function GitUrlImport() {
-  const [searchParams] = useSearchParams();
-  const { ready: historyReady, importChat } = useChatHistory();
-  const { ready: gitReady, gitClone } = useGit();
-  const [imported, setImported] = useState(false);
-  const [loading, setLoading] = useState(true);
-
-  const importRepo = async (repoUrl?: string) => {
-    if (!gitReady && !historyReady) {
-      return;
-    }
-
-    if (repoUrl) {
-      const ig = ignore().add(IGNORE_PATTERNS);
-
-      try {
-        const { workdir, data } = await gitClone(repoUrl);
-
-        if (importChat) {
-          const filePaths = Object.keys(data).filter((filePath) => !ig.ignores(filePath));
-          const textDecoder = new TextDecoder('utf-8');
-
-          const fileContents = filePaths
-            .map((filePath) => {
-              const { data: content, encoding } = data[filePath];
-              return {
-                path: filePath,
-                content:
-                  encoding === 'utf8' ? content : content instanceof Uint8Array ? textDecoder.decode(content) : '',
-              };
-            })
-            .filter((f) => f.content);
-
-          const commands = await detectProjectCommands(fileContents);
-          const commandsMessage = createCommandsMessage(commands);
-
-          const filesMessage: Message = {
-            role: 'assistant',
-            content: `Cloning the repo ${repoUrl} into ${workdir}
-<boltArtifact id="imported-files" title="Git Cloned Files" type="bundled">
-${fileContents
-  .map(
-    (file) =>
-      `<boltAction type="file" filePath="${file.path}">
-${file.content}
-</boltAction>`,
-  )
-  .join('\n')}
-</boltArtifact>`,
-            id: generateId(),
-            createdAt: new Date(),
-          };
-
-          const messages = [filesMessage];
-
-          if (commandsMessage) {
-            messages.push(commandsMessage);
-          }
-
-          await importChat(`Git Project:${repoUrl.split('/').slice(-1)[0]}`, messages);
-        }
-      } catch (error) {
-        console.error('Error during import:', error);
-        toast.error('Failed to import repository');
-        setLoading(false);
-        window.location.href = '/';
-
-        return;
-      }
-    }
-  };
-
-  useEffect(() => {
-    if (!historyReady || !gitReady || imported) {
-      return;
-    }
-
-    const url = searchParams.get('url');
-
-    if (!url) {
-      window.location.href = '/';
-      return;
-    }
-
-    importRepo(url).catch((error) => {
-      console.error('Error importing repo:', error);
-      toast.error('Failed to import repository');
-      setLoading(false);
-      window.location.href = '/';
-    });
-    setImported(true);
-  }, [searchParams, historyReady, gitReady, imported]);
-
-  return (
-    <ClientOnly fallback={<BaseChat />}>
-      {() => (
-        <>
-          <Chat />
-          {loading && <LoadingOverlay message="Please wait while we clone the repository..." />}
-        </>
-      )}
-    </ClientOnly>
-  );
-}
+import { useSearchParams } from '@remix-run/react';
+import { generateId, type Message } from 'ai';
+import ignore from 'ignore';
+import { useEffect, useState } from 'react';
+import { ClientOnly } from 'remix-utils/client-only';
+import { BaseChat } from '~/components/chat/BaseChat';
+import { Chat } from '~/components/chat/Chat.client';
+import { useGit } from '~/lib/hooks/useGit';
+import { useChatHistory } from '~/lib/persistence';
+import { createCommandsMessage, detectProjectCommands } from '~/utils/projectCommands';
+import { LoadingOverlay } from '~/components/ui/LoadingOverlay';
+import { toast } from 'react-toastify';
+
+const IGNORE_PATTERNS = [
+  'node_modules/**',
+  '.git/**',
+  '.github/**',
+  '.vscode/**',
+  '**/*.jpg',
+  '**/*.jpeg',
+  '**/*.png',
+  'dist/**',
+  'build/**',
+  '.next/**',
+  'coverage/**',
+  '.cache/**',
+  '.vscode/**',
+  '.idea/**',
+  '**/*.log',
+  '**/.DS_Store',
+  '**/npm-debug.log*',
+  '**/yarn-debug.log*',
+  '**/yarn-error.log*',
+  '**/*lock.json',
+  '**/*lock.yaml',
'**/*lock.yaml',
|
36 |
+
];
|
37 |
+
|
38 |
+
export function GitUrlImport() {
|
39 |
+
const [searchParams] = useSearchParams();
|
40 |
+
const { ready: historyReady, importChat } = useChatHistory();
|
41 |
+
const { ready: gitReady, gitClone } = useGit();
|
42 |
+
const [imported, setImported] = useState(false);
|
43 |
+
const [loading, setLoading] = useState(true);
|
44 |
+
|
45 |
+
const importRepo = async (repoUrl?: string) => {
|
46 |
+
if (!gitReady && !historyReady) {
|
47 |
+
return;
|
48 |
+
}
|
49 |
+
|
50 |
+
if (repoUrl) {
|
51 |
+
const ig = ignore().add(IGNORE_PATTERNS);
|
52 |
+
|
53 |
+
try {
|
54 |
+
const { workdir, data } = await gitClone(repoUrl);
|
55 |
+
|
56 |
+
if (importChat) {
|
57 |
+
const filePaths = Object.keys(data).filter((filePath) => !ig.ignores(filePath));
|
58 |
+
const textDecoder = new TextDecoder('utf-8');
|
59 |
+
|
60 |
+
const fileContents = filePaths
|
61 |
+
.map((filePath) => {
|
62 |
+
const { data: content, encoding } = data[filePath];
|
63 |
+
return {
|
64 |
+
path: filePath,
|
65 |
+
content:
|
66 |
+
encoding === 'utf8' ? content : content instanceof Uint8Array ? textDecoder.decode(content) : '',
|
67 |
+
};
|
68 |
+
})
|
69 |
+
.filter((f) => f.content);
|
70 |
+
|
71 |
+
const commands = await detectProjectCommands(fileContents);
|
72 |
+
const commandsMessage = createCommandsMessage(commands);
|
73 |
+
|
74 |
+
const filesMessage: Message = {
|
75 |
+
role: 'assistant',
|
76 |
+
content: `Cloning the repo ${repoUrl} into ${workdir}
|
77 |
+
<boltArtifact id="imported-files" title="Git Cloned Files" type="bundled">
|
78 |
+
${fileContents
|
79 |
+
.map(
|
80 |
+
(file) =>
|
81 |
+
`<boltAction type="file" filePath="${file.path}">
|
82 |
+
${file.content}
|
83 |
+
</boltAction>`,
|
84 |
+
)
|
85 |
+
.join('\n')}
|
86 |
+
</boltArtifact>`,
|
87 |
+
id: generateId(),
|
88 |
+
createdAt: new Date(),
|
89 |
+
};
|
90 |
+
|
91 |
+
const messages = [filesMessage];
|
92 |
+
|
93 |
+
if (commandsMessage) {
|
94 |
+
messages.push(commandsMessage);
|
95 |
+
}
|
96 |
+
|
97 |
+
await importChat(`Git Project:${repoUrl.split('/').slice(-1)[0]}`, messages);
|
98 |
+
}
|
99 |
+
} catch (error) {
|
100 |
+
console.error('Error during import:', error);
|
101 |
+
toast.error('Failed to import repository');
|
102 |
+
setLoading(false);
|
103 |
+
window.location.href = '/';
|
104 |
+
|
105 |
+
return;
|
106 |
+
}
|
107 |
+
}
|
108 |
+
};
|
109 |
+
|
110 |
+
useEffect(() => {
|
111 |
+
if (!historyReady || !gitReady || imported) {
|
112 |
+
return;
|
113 |
+
}
|
114 |
+
|
115 |
+
const url = searchParams.get('url');
|
116 |
+
|
117 |
+
if (!url) {
|
118 |
+
window.location.href = '/';
|
119 |
+
return;
|
120 |
+
}
|
121 |
+
|
122 |
+
importRepo(url).catch((error) => {
|
123 |
+
console.error('Error importing repo:', error);
|
124 |
+
toast.error('Failed to import repository');
|
125 |
+
setLoading(false);
|
126 |
+
window.location.href = '/';
|
127 |
+
});
|
128 |
+
setImported(true);
|
129 |
+
}, [searchParams, historyReady, gitReady, imported]);
|
130 |
+
|
131 |
+
return (
|
132 |
+
<ClientOnly fallback={<BaseChat />}>
|
133 |
+
{() => (
|
134 |
+
<>
|
135 |
+
<Chat />
|
136 |
+
{loading && <LoadingOverlay message="Please wait while we clone the repository..." />}
|
137 |
+
</>
|
138 |
+
)}
|
139 |
+
</ClientOnly>
|
140 |
+
);
|
141 |
+
}
|
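A small sketch of how the IGNORE_PATTERNS list above is applied through the ignore package in importRepo; the file paths here are illustrative.

import ignore from 'ignore';

// Same filtering approach as importRepo above: build one matcher from the
// patterns, then keep only the cloned paths it does not ignore.
const ig = ignore().add(['node_modules/**', '.git/**', '**/*lock.json']);

const clonedPaths = ['src/App.tsx', 'package-lock.json', 'node_modules/react/index.js'];
const kept = clonedPaths.filter((filePath) => !ig.ignores(filePath));
// kept === ['src/App.tsx']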
app/components/settings/data/DataTab.tsx
CHANGED
@@ -2,9 +2,10 @@ import React, { useState } from 'react';
|
|
2 |
import { useNavigate } from '@remix-run/react';
|
3 |
import Cookies from 'js-cookie';
|
4 |
import { toast } from 'react-toastify';
|
5 |
-
import { db, deleteById, getAll } from '~/lib/persistence';
|
6 |
import { logStore } from '~/lib/stores/logs';
|
7 |
import { classNames } from '~/utils/classNames';
|
|
|
8 |
|
9 |
// List of supported providers that can have API keys
|
10 |
const API_KEY_PROVIDERS = [
|
@@ -22,6 +23,7 @@ const API_KEY_PROVIDERS = [
|
|
22 |
'Perplexity',
|
23 |
'Cohere',
|
24 |
'AzureOpenAI',
|
|
|
25 |
] as const;
|
26 |
|
27 |
interface ApiKeys {
|
@@ -231,6 +233,81 @@ export default function DataTab() {
|
|
231 |
event.target.value = '';
|
232 |
};
|
233 |
234 |
return (
|
235 |
<div className="p-4 bg-bolt-elements-bg-depth-2 border border-bolt-elements-borderColor rounded-lg mb-4">
|
236 |
<div className="mb-6">
|
@@ -247,6 +324,12 @@ export default function DataTab() {
|
|
247 |
>
|
248 |
Export All Chats
|
249 |
</button>
|
250 |
<button
|
251 |
onClick={handleDeleteAllChats}
|
252 |
disabled={isDeleting}
|
|
|
2 |
import { useNavigate } from '@remix-run/react';
|
3 |
import Cookies from 'js-cookie';
|
4 |
import { toast } from 'react-toastify';
|
5 |
+
import { db, deleteById, getAll, setMessages } from '~/lib/persistence';
|
6 |
import { logStore } from '~/lib/stores/logs';
|
7 |
import { classNames } from '~/utils/classNames';
|
8 |
+
import type { Message } from 'ai';
|
9 |
|
10 |
// List of supported providers that can have API keys
|
11 |
const API_KEY_PROVIDERS = [
|
|
|
23 |
'Perplexity',
|
24 |
'Cohere',
|
25 |
'AzureOpenAI',
|
26 |
+
'AmazonBedrock',
|
27 |
] as const;
|
28 |
|
29 |
interface ApiKeys {
|
|
|
233 |
event.target.value = '';
|
234 |
};
|
235 |
|
236 |
+
const processChatData = (
|
237 |
+
data: any,
|
238 |
+
): Array<{
|
239 |
+
id: string;
|
240 |
+
messages: Message[];
|
241 |
+
description: string;
|
242 |
+
urlId?: string;
|
243 |
+
}> => {
|
244 |
+
// Handle Bolt standard format (single chat)
|
245 |
+
if (data.messages && Array.isArray(data.messages)) {
|
246 |
+
const chatId = crypto.randomUUID();
|
247 |
+
return [
|
248 |
+
{
|
249 |
+
id: chatId,
|
250 |
+
messages: data.messages,
|
251 |
+
description: data.description || 'Imported Chat',
|
252 |
+
urlId: chatId,
|
253 |
+
},
|
254 |
+
];
|
255 |
+
}
|
256 |
+
|
257 |
+
// Handle Bolt export format (multiple chats)
|
258 |
+
if (data.chats && Array.isArray(data.chats)) {
|
259 |
+
return data.chats.map((chat: { id?: string; messages: Message[]; description?: string; urlId?: string }) => ({
|
260 |
+
id: chat.id || crypto.randomUUID(),
|
261 |
+
messages: chat.messages,
|
262 |
+
description: chat.description || 'Imported Chat',
|
263 |
+
urlId: chat.urlId,
|
264 |
+
}));
|
265 |
+
}
|
266 |
+
|
267 |
+
console.error('No matching format found for:', data);
|
268 |
+
throw new Error('Unsupported chat format');
|
269 |
+
};
|
270 |
+
|
271 |
+
const handleImportChats = () => {
|
272 |
+
const input = document.createElement('input');
|
273 |
+
input.type = 'file';
|
274 |
+
input.accept = '.json';
|
275 |
+
|
276 |
+
input.onchange = async (e) => {
|
277 |
+
const file = (e.target as HTMLInputElement).files?.[0];
|
278 |
+
|
279 |
+
if (!file || !db) {
|
280 |
+
toast.error('Something went wrong');
|
281 |
+
return;
|
282 |
+
}
|
283 |
+
|
284 |
+
try {
|
285 |
+
const content = await file.text();
|
286 |
+
const data = JSON.parse(content);
|
287 |
+
const chatsToImport = processChatData(data);
|
288 |
+
|
289 |
+
for (const chat of chatsToImport) {
|
290 |
+
await setMessages(db, chat.id, chat.messages, chat.urlId, chat.description);
|
291 |
+
}
|
292 |
+
|
293 |
+
logStore.logSystem('Chats imported successfully', { count: chatsToImport.length });
|
294 |
+
toast.success(`Successfully imported ${chatsToImport.length} chat${chatsToImport.length > 1 ? 's' : ''}`);
|
295 |
+
window.location.reload();
|
296 |
+
} catch (error) {
|
297 |
+
if (error instanceof Error) {
|
298 |
+
logStore.logError('Failed to import chats:', error);
|
299 |
+
toast.error('Failed to import chats: ' + error.message);
|
300 |
+
} else {
|
301 |
+
toast.error('Failed to import chats');
|
302 |
+
}
|
303 |
+
|
304 |
+
console.error(error);
|
305 |
+
}
|
306 |
+
};
|
307 |
+
|
308 |
+
input.click();
|
309 |
+
};
|
310 |
+
|
311 |
return (
|
312 |
<div className="p-4 bg-bolt-elements-bg-depth-2 border border-bolt-elements-borderColor rounded-lg mb-4">
|
313 |
<div className="mb-6">
|
|
|
324 |
>
|
325 |
Export All Chats
|
326 |
</button>
|
327 |
+
<button
|
328 |
+
onClick={handleImportChats}
|
329 |
+
className="px-4 py-2 bg-bolt-elements-button-primary-background hover:bg-bolt-elements-button-primary-backgroundHover text-bolt-elements-textPrimary rounded-lg transition-colors"
|
330 |
+
>
|
331 |
+
Import Chats
|
332 |
+
</button>
|
333 |
<button
|
334 |
onClick={handleDeleteAllChats}
|
335 |
disabled={isDeleting}
|
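The two JSON shapes that processChatData above accepts, sketched as TypeScript literals; the ids, descriptions and messages are illustrative.

import type { Message } from 'ai';

// Shape 1: a single chat (the standard Bolt format handled first).
const singleChat: { description?: string; messages: Message[] } = {
  description: 'My chat',
  messages: [{ id: 'msg-1', role: 'user', content: 'Hello' }],
};

// Shape 2: a bulk export with a `chats` array (the Bolt export format).
const bulkExport = {
  chats: [
    { id: 'chat-1', urlId: 'chat-1', description: 'First chat', messages: singleChat.messages },
  ],
};

// Any other shape makes processChatData throw 'Unsupported chat format'.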
app/components/ui/LoadingOverlay.tsx
CHANGED
@@ -1,32 +1,32 @@
|
|
(old side omitted: all 32 lines were removed and re-added with identical content; the full file appears in the added lines below)
|
|
1 |
+
export const LoadingOverlay = ({
|
2 |
+
message = 'Loading...',
|
3 |
+
progress,
|
4 |
+
progressText,
|
5 |
+
}: {
|
6 |
+
message?: string;
|
7 |
+
progress?: number;
|
8 |
+
progressText?: string;
|
9 |
+
}) => {
|
10 |
+
return (
|
11 |
+
<div className="fixed inset-0 flex items-center justify-center bg-black/80 z-50 backdrop-blur-sm">
|
12 |
+
<div className="relative flex flex-col items-center gap-4 p-8 rounded-lg bg-bolt-elements-background-depth-2 shadow-lg">
|
13 |
+
<div
|
14 |
+
className={'i-svg-spinners:90-ring-with-bg text-bolt-elements-loader-progress'}
|
15 |
+
style={{ fontSize: '2rem' }}
|
16 |
+
></div>
|
17 |
+
<p className="text-lg text-bolt-elements-textTertiary">{message}</p>
|
18 |
+
{progress !== undefined && (
|
19 |
+
<div className="w-64 flex flex-col gap-2">
|
20 |
+
<div className="w-full h-2 bg-bolt-elements-background-depth-1 rounded-full overflow-hidden">
|
21 |
+
<div
|
22 |
+
className="h-full bg-bolt-elements-loader-progress transition-all duration-300 ease-out rounded-full"
|
23 |
+
style={{ width: `${Math.min(100, Math.max(0, progress))}%` }}
|
24 |
+
/>
|
25 |
+
</div>
|
26 |
+
{progressText && <p className="text-sm text-bolt-elements-textTertiary text-center">{progressText}</p>}
|
27 |
+
</div>
|
28 |
+
)}
|
29 |
+
</div>
|
30 |
+
</div>
|
31 |
+
);
|
32 |
+
};
|
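An illustrative usage of the LoadingOverlay component above; the wrapping component name and the numbers are assumptions made for the example.

import { LoadingOverlay } from '~/components/ui/LoadingOverlay';

// Show the overlay while a long-running task reports progress (0-100).
export function CloneProgress({ done, total }: { done: number; total: number }) {
  return (
    <LoadingOverlay
      message="Cloning repository..."
      progress={(done / total) * 100}
      progressText={`${done} / ${total} files`}
    />
  );
}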
app/components/workbench/Preview.tsx
CHANGED
@@ -1,5 +1,5 @@
|
|
1 |
-
import { useStore } from '@nanostores/react';
|
2 |
import { memo, useCallback, useEffect, useRef, useState } from 'react';
|
|
|
3 |
import { IconButton } from '~/components/ui/IconButton';
|
4 |
import { workbenchStore } from '~/lib/stores/workbench';
|
5 |
import { PortDropdown } from './PortDropdown';
|
@@ -7,6 +7,20 @@ import { ScreenshotSelector } from './ScreenshotSelector';
|
|
7 |
|
8 |
type ResizeSide = 'left' | 'right' | null;
|
9 |
|
10 |
export const Preview = memo(() => {
|
11 |
const iframeRef = useRef<HTMLIFrameElement>(null);
|
12 |
const containerRef = useRef<HTMLDivElement>(null);
|
@@ -15,6 +29,7 @@ export const Preview = memo(() => {
|
|
15 |
const [activePreviewIndex, setActivePreviewIndex] = useState(0);
|
16 |
const [isPortDropdownOpen, setIsPortDropdownOpen] = useState(false);
|
17 |
const [isFullscreen, setIsFullscreen] = useState(false);
|
|
|
18 |
const hasSelectedPreview = useRef(false);
|
19 |
const previews = useStore(workbenchStore.previews);
|
20 |
const activePreview = previews[activePreviewIndex];
|
@@ -27,7 +42,7 @@ export const Preview = memo(() => {
|
|
27 |
const [isDeviceModeOn, setIsDeviceModeOn] = useState(false);
|
28 |
|
29 |
// Use percentage for width
|
30 |
-
const [widthPercent, setWidthPercent] = useState<number>(37.5);
|
31 |
|
32 |
const resizingState = useRef({
|
33 |
isResizing: false,
|
@@ -37,8 +52,10 @@ export const Preview = memo(() => {
|
|
37 |
windowWidth: window.innerWidth,
|
38 |
});
|
39 |
|
40 |
-
|
41 |
-
|
|
|
|
|
42 |
|
43 |
useEffect(() => {
|
44 |
if (!activePreview) {
|
@@ -79,7 +96,6 @@ export const Preview = memo(() => {
|
|
79 |
[],
|
80 |
);
|
81 |
|
82 |
-
// When previews change, display the lowest port if user hasn't selected a preview
|
83 |
useEffect(() => {
|
84 |
if (previews.length > 1 && !hasSelectedPreview.current) {
|
85 |
const minPortIndex = previews.reduce(findMinPortIndex, 0);
|
@@ -122,7 +138,6 @@ export const Preview = memo(() => {
|
|
122 |
return;
|
123 |
}
|
124 |
|
125 |
-
// Prevent text selection
|
126 |
document.body.style.userSelect = 'none';
|
127 |
|
128 |
resizingState.current.isResizing = true;
|
@@ -134,7 +149,7 @@ export const Preview = memo(() => {
|
|
134 |
document.addEventListener('mousemove', onMouseMove);
|
135 |
document.addEventListener('mouseup', onMouseUp);
|
136 |
|
137 |
-
e.preventDefault();
|
138 |
};
|
139 |
|
140 |
const onMouseMove = (e: MouseEvent) => {
|
@@ -145,7 +160,6 @@ export const Preview = memo(() => {
|
|
145 |
const dx = e.clientX - resizingState.current.startX;
|
146 |
const windowWidth = resizingState.current.windowWidth;
|
147 |
|
148 |
-
// Apply scaling factor to increase sensitivity
|
149 |
const dxPercent = (dx / windowWidth) * 100 * SCALING_FACTOR;
|
150 |
|
151 |
let newWidthPercent = resizingState.current.startWidthPercent;
|
@@ -156,7 +170,6 @@ export const Preview = memo(() => {
|
|
156 |
newWidthPercent = resizingState.current.startWidthPercent - dxPercent;
|
157 |
}
|
158 |
|
159 |
-
// Clamp the width between 10% and 90%
|
160 |
newWidthPercent = Math.max(10, Math.min(newWidthPercent, 90));
|
161 |
|
162 |
setWidthPercent(newWidthPercent);
|
@@ -168,17 +181,12 @@ export const Preview = memo(() => {
|
|
168 |
document.removeEventListener('mousemove', onMouseMove);
|
169 |
document.removeEventListener('mouseup', onMouseUp);
|
170 |
|
171 |
-
// Restore text selection
|
172 |
document.body.style.userSelect = '';
|
173 |
};
|
174 |
|
175 |
-
// Handle window resize to ensure widthPercent remains valid
|
176 |
useEffect(() => {
|
177 |
const handleWindowResize = () => {
|
178 |
-
|
179 |
-
* Optional: Adjust widthPercent if necessary
|
180 |
-
* For now, since widthPercent is relative, no action is needed
|
181 |
-
*/
|
182 |
};
|
183 |
|
184 |
window.addEventListener('resize', handleWindowResize);
|
@@ -188,7 +196,6 @@ export const Preview = memo(() => {
|
|
188 |
};
|
189 |
}, []);
|
190 |
|
191 |
-
// A small helper component for the handle's "grip" icon
|
192 |
const GripIcon = () => (
|
193 |
<div
|
194 |
style={{
|
@@ -213,22 +220,47 @@ export const Preview = memo(() => {
|
|
213 |
</div>
|
214 |
);
|
215 |
|
216 |
return (
|
217 |
-
<div
|
|
|
|
|
|
|
218 |
{isPortDropdownOpen && (
|
219 |
<div className="z-iframe-overlay w-full h-full absolute" onClick={() => setIsPortDropdownOpen(false)} />
|
220 |
)}
|
221 |
-
<div className="bg-bolt-elements-background-depth-2 p-2 flex items-center gap-
|
222 |
-
<
|
223 |
-
|
224 |
-
|
225 |
-
|
226 |
-
|
227 |
-
|
228 |
-
|
229 |
-
|
230 |
-
|
231 |
-
>
|
232 |
<input
|
233 |
title="URL"
|
234 |
ref={inputRef}
|
@@ -250,39 +282,90 @@ export const Preview = memo(() => {
|
|
250 |
/>
|
251 |
</div>
|
252 |
|
253 |
-
|
254 |
-
|
255 |
-
|
256 |
-
|
257 |
-
|
258 |
-
|
259 |
-
|
260 |
-
|
261 |
/>
|
262 |
-
|
263 |
-
|
264 |
-
|
265 |
-
|
266 |
-
|
267 |
-
|
268 |
-
|
269 |
-
|
270 |
-
|
271 |
-
|
272 |
-
|
273 |
-
|
274 |
-
|
275 |
-
|
276 |
-
|
277 |
</div>
|
278 |
|
279 |
<div className="flex-1 border-t border-bolt-elements-borderColor flex justify-center items-center overflow-auto">
|
280 |
<div
|
281 |
style={{
|
282 |
width: isDeviceModeOn ? `${widthPercent}%` : '100%',
|
283 |
-
height: '100%',
|
284 |
overflow: 'visible',
|
285 |
-
background: '
|
286 |
position: 'relative',
|
287 |
display: 'flex',
|
288 |
}}
|
@@ -292,9 +375,10 @@ export const Preview = memo(() => {
|
|
292 |
<iframe
|
293 |
ref={iframeRef}
|
294 |
title="preview"
|
295 |
-
className="border-none w-full h-full bg-
|
296 |
src={iframeUrl}
|
297 |
-
|
|
|
298 |
/>
|
299 |
<ScreenshotSelector
|
300 |
isSelectionMode={isSelectionMode}
|
@@ -303,12 +387,13 @@ export const Preview = memo(() => {
|
|
303 |
/>
|
304 |
</>
|
305 |
) : (
|
306 |
-
<div className="flex w-full h-full justify-center items-center bg-
|
|
|
|
|
307 |
)}
|
308 |
|
309 |
{isDeviceModeOn && (
|
310 |
<>
|
311 |
-
{/* Left handle */}
|
312 |
<div
|
313 |
onMouseDown={(e) => startResizing(e, 'left')}
|
314 |
style={{
|
@@ -333,7 +418,6 @@ export const Preview = memo(() => {
|
|
333 |
<GripIcon />
|
334 |
</div>
|
335 |
|
336 |
-
{/* Right handle */}
|
337 |
<div
|
338 |
onMouseDown={(e) => startResizing(e, 'right')}
|
339 |
style={{
|
|
|
|
|
1 |
import { memo, useCallback, useEffect, useRef, useState } from 'react';
|
2 |
+
import { useStore } from '@nanostores/react';
|
3 |
import { IconButton } from '~/components/ui/IconButton';
|
4 |
import { workbenchStore } from '~/lib/stores/workbench';
|
5 |
import { PortDropdown } from './PortDropdown';
|
|
|
7 |
|
8 |
type ResizeSide = 'left' | 'right' | null;
|
9 |
|
10 |
+
interface WindowSize {
|
11 |
+
name: string;
|
12 |
+
width: number;
|
13 |
+
height: number;
|
14 |
+
icon: string;
|
15 |
+
}
|
16 |
+
|
17 |
+
const WINDOW_SIZES: WindowSize[] = [
|
18 |
+
{ name: 'Mobile', width: 375, height: 667, icon: 'i-ph:device-mobile' },
|
19 |
+
{ name: 'Tablet', width: 768, height: 1024, icon: 'i-ph:device-tablet' },
|
20 |
+
{ name: 'Laptop', width: 1366, height: 768, icon: 'i-ph:laptop' },
|
21 |
+
{ name: 'Desktop', width: 1920, height: 1080, icon: 'i-ph:monitor' },
|
22 |
+
];
|
23 |
+
|
24 |
export const Preview = memo(() => {
|
25 |
const iframeRef = useRef<HTMLIFrameElement>(null);
|
26 |
const containerRef = useRef<HTMLDivElement>(null);
|
|
|
29 |
const [activePreviewIndex, setActivePreviewIndex] = useState(0);
|
30 |
const [isPortDropdownOpen, setIsPortDropdownOpen] = useState(false);
|
31 |
const [isFullscreen, setIsFullscreen] = useState(false);
|
32 |
+
const [isPreviewOnly, setIsPreviewOnly] = useState(false);
|
33 |
const hasSelectedPreview = useRef(false);
|
34 |
const previews = useStore(workbenchStore.previews);
|
35 |
const activePreview = previews[activePreviewIndex];
|
|
|
42 |
const [isDeviceModeOn, setIsDeviceModeOn] = useState(false);
|
43 |
|
44 |
// Use percentage for width
|
45 |
+
const [widthPercent, setWidthPercent] = useState<number>(37.5);
|
46 |
|
47 |
const resizingState = useRef({
|
48 |
isResizing: false,
|
|
|
52 |
windowWidth: window.innerWidth,
|
53 |
});
|
54 |
|
55 |
+
const SCALING_FACTOR = 2;
|
56 |
+
|
57 |
+
const [isWindowSizeDropdownOpen, setIsWindowSizeDropdownOpen] = useState(false);
|
58 |
+
const [selectedWindowSize, setSelectedWindowSize] = useState<WindowSize>(WINDOW_SIZES[0]);
|
59 |
|
60 |
useEffect(() => {
|
61 |
if (!activePreview) {
|
|
|
96 |
[],
|
97 |
);
|
98 |
|
|
|
99 |
useEffect(() => {
|
100 |
if (previews.length > 1 && !hasSelectedPreview.current) {
|
101 |
const minPortIndex = previews.reduce(findMinPortIndex, 0);
|
|
|
138 |
return;
|
139 |
}
|
140 |
|
|
|
141 |
document.body.style.userSelect = 'none';
|
142 |
|
143 |
resizingState.current.isResizing = true;
|
|
|
149 |
document.addEventListener('mousemove', onMouseMove);
|
150 |
document.addEventListener('mouseup', onMouseUp);
|
151 |
|
152 |
+
e.preventDefault();
|
153 |
};
|
154 |
|
155 |
const onMouseMove = (e: MouseEvent) => {
|
|
|
160 |
const dx = e.clientX - resizingState.current.startX;
|
161 |
const windowWidth = resizingState.current.windowWidth;
|
162 |
|
|
|
163 |
const dxPercent = (dx / windowWidth) * 100 * SCALING_FACTOR;
|
164 |
|
165 |
let newWidthPercent = resizingState.current.startWidthPercent;
|
|
|
170 |
newWidthPercent = resizingState.current.startWidthPercent - dxPercent;
|
171 |
}
|
172 |
|
|
|
173 |
newWidthPercent = Math.max(10, Math.min(newWidthPercent, 90));
|
174 |
|
175 |
setWidthPercent(newWidthPercent);
|
|
|
181 |
document.removeEventListener('mousemove', onMouseMove);
|
182 |
document.removeEventListener('mouseup', onMouseUp);
|
183 |
|
|
|
184 |
document.body.style.userSelect = '';
|
185 |
};
|
186 |
|
|
|
187 |
useEffect(() => {
|
188 |
const handleWindowResize = () => {
|
189 |
+
// Optional: Adjust widthPercent if necessary
|
|
|
|
|
|
|
190 |
};
|
191 |
|
192 |
window.addEventListener('resize', handleWindowResize);
|
|
|
196 |
};
|
197 |
}, []);
|
198 |
|
|
|
199 |
const GripIcon = () => (
|
200 |
<div
|
201 |
style={{
|
|
|
220 |
</div>
|
221 |
);
|
222 |
|
223 |
+
const openInNewWindow = (size: WindowSize) => {
|
224 |
+
if (activePreview?.baseUrl) {
|
225 |
+
const match = activePreview.baseUrl.match(/^https?:\/\/([^.]+)\.local-credentialless\.webcontainer-api\.io/);
|
226 |
+
|
227 |
+
if (match) {
|
228 |
+
const previewId = match[1];
|
229 |
+
const previewUrl = `/webcontainer/preview/${previewId}`;
|
230 |
+
const newWindow = window.open(
|
231 |
+
previewUrl,
|
232 |
+
'_blank',
|
233 |
+
`noopener,noreferrer,width=${size.width},height=${size.height},menubar=no,toolbar=no,location=no,status=no`,
|
234 |
+
);
|
235 |
+
|
236 |
+
if (newWindow) {
|
237 |
+
newWindow.focus();
|
238 |
+
}
|
239 |
+
} else {
|
240 |
+
console.warn('[Preview] Invalid WebContainer URL:', activePreview.baseUrl);
|
241 |
+
}
|
242 |
+
}
|
243 |
+
};
|
244 |
+
|
245 |
return (
|
246 |
+
<div
|
247 |
+
ref={containerRef}
|
248 |
+
className={`w-full h-full flex flex-col relative ${isPreviewOnly ? 'fixed inset-0 z-50 bg-white' : ''}`}
|
249 |
+
>
|
250 |
{isPortDropdownOpen && (
|
251 |
<div className="z-iframe-overlay w-full h-full absolute" onClick={() => setIsPortDropdownOpen(false)} />
|
252 |
)}
|
253 |
+
<div className="bg-bolt-elements-background-depth-2 p-2 flex items-center gap-2">
|
254 |
+
<div className="flex items-center gap-2">
|
255 |
+
<IconButton icon="i-ph:arrow-clockwise" onClick={reloadPreview} />
|
256 |
+
<IconButton
|
257 |
+
icon="i-ph:selection"
|
258 |
+
onClick={() => setIsSelectionMode(!isSelectionMode)}
|
259 |
+
className={isSelectionMode ? 'bg-bolt-elements-background-depth-3' : ''}
|
260 |
+
/>
|
261 |
+
</div>
|
262 |
+
|
263 |
+
<div className="flex-grow flex items-center gap-1 bg-bolt-elements-preview-addressBar-background border border-bolt-elements-borderColor text-bolt-elements-preview-addressBar-text rounded-full px-3 py-1 text-sm hover:bg-bolt-elements-preview-addressBar-backgroundHover hover:focus-within:bg-bolt-elements-preview-addressBar-backgroundActive focus-within:bg-bolt-elements-preview-addressBar-backgroundActive focus-within-border-bolt-elements-borderColorActive focus-within:text-bolt-elements-preview-addressBar-textActive">
|
264 |
<input
|
265 |
title="URL"
|
266 |
ref={inputRef}
|
|
|
282 |
/>
|
283 |
</div>
|
284 |
|
285 |
+
<div className="flex items-center gap-2">
|
286 |
+
{previews.length > 1 && (
|
287 |
+
<PortDropdown
|
288 |
+
activePreviewIndex={activePreviewIndex}
|
289 |
+
setActivePreviewIndex={setActivePreviewIndex}
|
290 |
+
isDropdownOpen={isPortDropdownOpen}
|
291 |
+
setHasSelectedPreview={(value) => (hasSelectedPreview.current = value)}
|
292 |
+
setIsDropdownOpen={setIsPortDropdownOpen}
|
293 |
+
previews={previews}
|
294 |
+
/>
|
295 |
+
)}
|
296 |
+
|
297 |
+
<IconButton
|
298 |
+
icon="i-ph:devices"
|
299 |
+
onClick={toggleDeviceMode}
|
300 |
+
title={isDeviceModeOn ? 'Switch to Responsive Mode' : 'Switch to Device Mode'}
|
301 |
/>
|
302 |
+
|
303 |
+
<IconButton
|
304 |
+
icon="i-ph:layout-light"
|
305 |
+
onClick={() => setIsPreviewOnly(!isPreviewOnly)}
|
306 |
+
title={isPreviewOnly ? 'Show Full Interface' : 'Show Preview Only'}
|
307 |
+
/>
|
308 |
+
|
309 |
+
<IconButton
|
310 |
+
icon={isFullscreen ? 'i-ph:arrows-in' : 'i-ph:arrows-out'}
|
311 |
+
onClick={toggleFullscreen}
|
312 |
+
title={isFullscreen ? 'Exit Full Screen' : 'Full Screen'}
|
313 |
+
/>
|
314 |
+
|
315 |
+
<div className="flex items-center relative">
|
316 |
+
<IconButton
|
317 |
+
icon="i-ph:arrow-square-out"
|
318 |
+
onClick={() => openInNewWindow(selectedWindowSize)}
|
319 |
+
title={`Open Preview in ${selectedWindowSize.name} Window`}
|
320 |
+
/>
|
321 |
+
<IconButton
|
322 |
+
icon="i-ph:caret-down"
|
323 |
+
onClick={() => setIsWindowSizeDropdownOpen(!isWindowSizeDropdownOpen)}
|
324 |
+
className="ml-1"
|
325 |
+
title="Select Window Size"
|
326 |
+
/>
|
327 |
+
|
328 |
+
{isWindowSizeDropdownOpen && (
|
329 |
+
<>
|
330 |
+
<div className="fixed inset-0 z-50" onClick={() => setIsWindowSizeDropdownOpen(false)} />
|
331 |
+
<div className="absolute right-0 top-full mt-2 z-50 min-w-[240px] bg-white dark:bg-black rounded-xl shadow-2xl border border-[#E5E7EB] dark:border-[rgba(255,255,255,0.1)] overflow-hidden">
|
332 |
+
{WINDOW_SIZES.map((size) => (
|
333 |
+
<button
|
334 |
+
key={size.name}
|
335 |
+
className="w-full px-4 py-3.5 text-left text-[#111827] dark:text-gray-300 text-sm whitespace-nowrap flex items-center gap-3 group hover:bg-[#F5EEFF] dark:hover:bg-gray-900 bg-white dark:bg-black"
|
336 |
+
onClick={() => {
|
337 |
+
setSelectedWindowSize(size);
|
338 |
+
setIsWindowSizeDropdownOpen(false);
|
339 |
+
openInNewWindow(size);
|
340 |
+
}}
|
341 |
+
>
|
342 |
+
<div
|
343 |
+
className={`${size.icon} w-5 h-5 text-[#6B7280] dark:text-gray-400 group-hover:text-[#6D28D9] dark:group-hover:text-[#6D28D9] transition-colors duration-200`}
|
344 |
+
/>
|
345 |
+
<div className="flex flex-col">
|
346 |
+
<span className="font-medium group-hover:text-[#6D28D9] dark:group-hover:text-[#6D28D9] transition-colors duration-200">
|
347 |
+
{size.name}
|
348 |
+
</span>
|
349 |
+
<span className="text-xs text-[#6B7280] dark:text-gray-400 group-hover:text-[#6D28D9] dark:group-hover:text-[#6D28D9] transition-colors duration-200">
|
350 |
+
{size.width} × {size.height}
|
351 |
+
</span>
|
352 |
+
</div>
|
353 |
+
</button>
|
354 |
+
))}
|
355 |
+
</div>
|
356 |
+
</>
|
357 |
+
)}
|
358 |
+
</div>
|
359 |
+
</div>
|
360 |
</div>
|
361 |
|
362 |
<div className="flex-1 border-t border-bolt-elements-borderColor flex justify-center items-center overflow-auto">
|
363 |
<div
|
364 |
style={{
|
365 |
width: isDeviceModeOn ? `${widthPercent}%` : '100%',
|
366 |
+
height: '100%',
|
367 |
overflow: 'visible',
|
368 |
+
background: 'var(--bolt-elements-background-depth-1)',
|
369 |
position: 'relative',
|
370 |
display: 'flex',
|
371 |
}}
|
|
|
375 |
<iframe
|
376 |
ref={iframeRef}
|
377 |
title="preview"
|
378 |
+
className="border-none w-full h-full bg-bolt-elements-background-depth-1"
|
379 |
src={iframeUrl}
|
380 |
+
sandbox="allow-scripts allow-forms allow-popups allow-modals allow-storage-access-by-user-activation allow-same-origin"
|
381 |
+
allow="cross-origin-isolated"
|
382 |
/>
|
383 |
<ScreenshotSelector
|
384 |
isSelectionMode={isSelectionMode}
|
|
|
387 |
/>
|
388 |
</>
|
389 |
) : (
|
390 |
+
<div className="flex w-full h-full justify-center items-center bg-bolt-elements-background-depth-1 text-bolt-elements-textPrimary">
|
391 |
+
No preview available
|
392 |
+
</div>
|
393 |
)}
|
394 |
|
395 |
{isDeviceModeOn && (
|
396 |
<>
|
|
|
397 |
<div
|
398 |
onMouseDown={(e) => startResizing(e, 'left')}
|
399 |
style={{
|
|
|
418 |
<GripIcon />
|
419 |
</div>
|
420 |
|
|
|
421 |
<div
|
422 |
onMouseDown={(e) => startResizing(e, 'right')}
|
423 |
style={{
|
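A sketch of the URL rewrite performed by the openInNewWindow helper added above; the example hostname is illustrative, not a real preview id.

// Maps a WebContainer preview URL onto the app-local /webcontainer/preview/:id route.
const baseUrl = 'https://abc123--5173.local-credentialless.webcontainer-api.io';
const match = baseUrl.match(/^https?:\/\/([^.]+)\.local-credentialless\.webcontainer-api\.io/);

if (match) {
  const previewUrl = `/webcontainer/preview/${match[1]}`;
  console.log(previewUrl); // "/webcontainer/preview/abc123--5173"
}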
app/lib/.server/llm/stream-text.ts
CHANGED
@@ -1,300 +1,300 @@
|
|
(old side omitted: all 300 lines were removed and re-added; the old content matches the new version below, except that the old logger.info message and the model: arguments to provider.getModelInstance were cut off in this rendering)
|
|
1 |
+
import { convertToCoreMessages, streamText as _streamText, type CoreMessage } from 'ai';
|
2 |
+
import fs from 'fs';
|
3 |
+
import { MAX_TOKENS } from './constants';
|
4 |
+
import { getSystemPrompt } from '~/lib/common/prompts/prompts';
|
5 |
+
import {
|
6 |
+
DEFAULT_MODEL,
|
7 |
+
DEFAULT_PROVIDER,
|
8 |
+
MODEL_REGEX,
|
9 |
+
MODIFICATIONS_TAG_NAME,
|
10 |
+
PROVIDER_LIST,
|
11 |
+
PROVIDER_REGEX,
|
12 |
+
WORK_DIR,
|
13 |
+
} from '~/utils/constants';
|
14 |
+
import ignore from 'ignore';
|
15 |
+
import type { IProviderSetting } from '~/types/model';
|
16 |
+
import { PromptLibrary } from '~/lib/common/prompt-library';
|
17 |
+
import { allowedHTMLElements } from '~/utils/markdown';
|
18 |
+
import { LLMManager } from '~/lib/modules/llm/manager';
|
19 |
+
import { createScopedLogger } from '~/utils/logger';
|
20 |
+
|
21 |
+
interface ToolResult<Name extends string, Args, Result> {
|
22 |
+
toolCallId: string;
|
23 |
+
toolName: Name;
|
24 |
+
args: Args;
|
25 |
+
result: Result;
|
26 |
+
}
|
27 |
+
|
28 |
+
interface Message {
|
29 |
+
role: 'user' | 'assistant';
|
30 |
+
content: string;
|
31 |
+
toolInvocations?: ToolResult<string, unknown, unknown>[];
|
32 |
+
model?: string;
|
33 |
+
}
|
34 |
+
|
35 |
+
export type Messages = Message[];
|
36 |
+
|
37 |
+
export type StreamingOptions = Omit<Parameters<typeof _streamText>[0], 'model'>;
|
38 |
+
|
39 |
+
export interface File {
|
40 |
+
type: 'file';
|
41 |
+
content: string;
|
42 |
+
isBinary: boolean;
|
43 |
+
}
|
44 |
+
|
45 |
+
export interface Folder {
|
46 |
+
type: 'folder';
|
47 |
+
}
|
48 |
+
|
49 |
+
type Dirent = File | Folder;
|
50 |
+
|
51 |
+
export type FileMap = Record<string, Dirent | undefined>;
|
52 |
+
|
53 |
+
export function simplifyBoltActions(input: string): string {
|
54 |
+
// Using regex to match boltAction tags that have type="file"
|
55 |
+
const regex = /(<boltAction[^>]*type="file"[^>]*>)([\s\S]*?)(<\/boltAction>)/g;
|
56 |
+
|
57 |
+
// Replace each matching occurrence
|
58 |
+
return input.replace(regex, (_0, openingTag, _2, closingTag) => {
|
59 |
+
return `${openingTag}\n ...\n ${closingTag}`;
|
60 |
+
});
|
61 |
+
}
|
62 |
+
|
63 |
+
// Common patterns to ignore, similar to .gitignore
|
64 |
+
const IGNORE_PATTERNS = [
|
65 |
+
'node_modules/**',
|
66 |
+
'.git/**',
|
67 |
+
'dist/**',
|
68 |
+
'build/**',
|
69 |
+
'.next/**',
|
70 |
+
'coverage/**',
|
71 |
+
'.cache/**',
|
72 |
+
'.vscode/**',
|
73 |
+
'.idea/**',
|
74 |
+
'**/*.log',
|
75 |
+
'**/.DS_Store',
|
76 |
+
'**/npm-debug.log*',
|
77 |
+
'**/yarn-debug.log*',
|
78 |
+
'**/yarn-error.log*',
|
79 |
+
'**/*lock.json',
|
80 |
+
'**/*lock.yml',
|
81 |
+
];
|
82 |
+
const ig = ignore().add(IGNORE_PATTERNS);
|
83 |
+
|
84 |
+
const CACHE_CONTROL_METADATA = {
|
85 |
+
experimental_providerMetadata: {
|
86 |
+
anthropic: { cacheControl: { type: 'ephemeral' } },
|
87 |
+
},
|
88 |
+
};
|
89 |
+
|
90 |
+
function createFilesContext(files: FileMap) {
|
91 |
+
let filePaths = Object.keys(files);
|
92 |
+
filePaths = filePaths.filter((x) => {
|
93 |
+
const relPath = x.replace('/home/project/', '');
|
94 |
+
return !ig.ignores(relPath);
|
95 |
+
});
|
96 |
+
|
97 |
+
const fileContexts = filePaths
|
98 |
+
.filter((x) => files[x] && files[x].type == 'file')
|
99 |
+
.map((path) => {
|
100 |
+
const dirent = files[path];
|
101 |
+
|
102 |
+
if (!dirent || dirent.type == 'folder') {
|
103 |
+
return '';
|
104 |
+
}
|
105 |
+
|
106 |
+
const codeWithLinesNumbers = dirent.content
|
107 |
+
.split('\n')
|
108 |
+
.map((v, i) => `${i + 1}|${v}`)
|
109 |
+
.join('\n');
|
110 |
+
|
111 |
+
return `<file path="${path}">\n${codeWithLinesNumbers}\n</file>`;
|
112 |
+
});
|
113 |
+
|
114 |
+
return `Below are the code files present in the webcontainer:\ncode format:\n<line number>|<line content>\n <codebase>${fileContexts.join('\n\n')}\n\n</codebase>`;
|
115 |
+
}
|
116 |
+
|
117 |
+
function persistMessages(messages: CoreMessage[]) {
|
118 |
+
try {
|
119 |
+
const messagesFilePath = 'messages.json';
|
120 |
+
fs.writeFileSync(messagesFilePath, JSON.stringify(messages, null, 2), 'utf8');
|
121 |
+
} catch (error) {
|
122 |
+
console.error('Error writing messages to file:', error);
|
123 |
+
}
|
124 |
+
}
|
125 |
+
|
126 |
+
function extractPropertiesFromMessage(message: Message): { model: string; provider: string; content: string } {
|
127 |
+
const textContent = Array.isArray(message.content)
|
128 |
+
? message.content.find((item) => item.type === 'text')?.text || ''
|
129 |
+
: message.content;
|
130 |
+
|
131 |
+
const modelMatch = textContent.match(MODEL_REGEX);
|
132 |
+
const providerMatch = textContent.match(PROVIDER_REGEX);
|
133 |
+
|
134 |
+
/*
|
135 |
+
* Extract model
|
136 |
+
* const modelMatch = message.content.match(MODEL_REGEX);
|
137 |
+
*/
|
138 |
+
const model = modelMatch ? modelMatch[1] : DEFAULT_MODEL;
|
139 |
+
|
140 |
+
/*
|
141 |
+
* Extract provider
|
142 |
+
* const providerMatch = message.content.match(PROVIDER_REGEX);
|
143 |
+
*/
|
144 |
+
const provider = providerMatch ? providerMatch[1] : DEFAULT_PROVIDER.name;
|
145 |
+
|
146 |
+
const cleanedContent = Array.isArray(message.content)
|
147 |
+
? message.content.map((item) => {
|
148 |
+
if (item.type === 'text') {
|
149 |
+
return {
|
150 |
+
type: 'text',
|
151 |
+
text: item.text?.replace(MODEL_REGEX, '').replace(PROVIDER_REGEX, ''),
|
152 |
+
};
|
153 |
+
}
|
154 |
+
|
155 |
+
return item; // Preserve image_url and other types as is
|
156 |
+
})
|
157 |
+
: textContent.replace(MODEL_REGEX, '').replace(PROVIDER_REGEX, '');
|
158 |
+
|
159 |
+
return { model, provider, content: cleanedContent };
|
160 |
+
}
|
161 |
+
|
162 |
+
const logger = createScopedLogger('stream-text');
|
163 |
+
|
164 |
+
export async function streamText(props: {
|
165 |
+
messages: Messages;
|
166 |
+
env: Env;
|
167 |
+
options?: StreamingOptions;
|
168 |
+
apiKeys?: Record<string, string>;
|
169 |
+
files?: FileMap;
|
170 |
+
providerSettings?: Record<string, IProviderSetting>;
|
171 |
+
promptId?: string;
|
172 |
+
contextOptimization?: boolean;
|
173 |
+
isPromptCachingEnabled?: boolean;
|
174 |
+
}) {
|
175 |
+
const {
|
176 |
+
messages,
|
177 |
+
env: serverEnv,
|
178 |
+
options,
|
179 |
+
apiKeys,
|
180 |
+
files,
|
181 |
+
providerSettings,
|
182 |
+
promptId,
|
183 |
+
contextOptimization,
|
184 |
+
isPromptCachingEnabled,
|
185 |
+
} = props;
|
186 |
+
|
187 |
+
let currentModel = DEFAULT_MODEL;
|
188 |
+
let currentProvider = DEFAULT_PROVIDER.name;
|
189 |
+
|
190 |
+
const processedMessages = messages.map((message, idx) => {
|
191 |
+
if (message.role === 'user') {
|
192 |
+
const { model, provider, content } = extractPropertiesFromMessage(message);
|
193 |
+
currentModel = model;
|
194 |
+
currentProvider = provider;
|
195 |
+
|
196 |
+
const putCacheControl = isPromptCachingEnabled && idx >= messages?.length - 4;
|
197 |
+
|
198 |
+
return {
|
199 |
+
...message,
|
200 |
+
content,
|
201 |
+
...(putCacheControl && CACHE_CONTROL_METADATA),
|
202 |
+
};
|
203 |
+
} else if (message.role == 'assistant') {
|
204 |
+
let content = message.content;
|
205 |
+
|
206 |
+
if (contextOptimization) {
|
207 |
+
content = simplifyBoltActions(content);
|
208 |
+
}
|
209 |
+
|
210 |
+
return { ...message, content };
|
211 |
+
}
|
212 |
+
|
213 |
+
return message;
|
214 |
+
});
|
215 |
+
|
216 |
+
const provider = PROVIDER_LIST.find((p) => p.name === currentProvider) || DEFAULT_PROVIDER;
|
217 |
+
const staticModels = LLMManager.getInstance().getStaticModelListFromProvider(provider);
|
218 |
+
let modelDetails = staticModels.find((m) => m.name === currentModel);
|
219 |
+
|
220 |
+
if (!modelDetails) {
|
221 |
+
const modelsList = [
|
222 |
+
...(provider.staticModels || []),
|
223 |
+
...(await LLMManager.getInstance().getModelListFromProvider(provider, {
|
224 |
+
apiKeys,
|
225 |
+
providerSettings,
|
226 |
+
serverEnv: serverEnv as any,
|
227 |
+
})),
|
228 |
+
];
|
229 |
+
|
230 |
+
if (!modelsList.length) {
|
231 |
+
throw new Error(`No models found for provider ${provider.name}`);
|
232 |
+
}
|
233 |
+
|
234 |
+
modelDetails = modelsList.find((m) => m.name === currentModel);
|
235 |
+
|
236 |
+
if (!modelDetails) {
|
237 |
+
// Fallback to first model
|
238 |
+
logger.warn(
|
239 |
+
`MODEL [${currentModel}] not found in provider [${provider.name}]. Falling back to first model. ${modelsList[0].name}`,
|
240 |
+
);
|
241 |
+
modelDetails = modelsList[0];
|
242 |
+
}
|
243 |
+
}
|
244 |
+
|
245 |
+
const dynamicMaxTokens = modelDetails && modelDetails.maxTokenAllowed ? modelDetails.maxTokenAllowed : MAX_TOKENS;
|
246 |
+
|
247 |
+
let systemPrompt =
|
248 |
+
PromptLibrary.getPropmtFromLibrary(promptId || 'default', {
|
249 |
+
cwd: WORK_DIR,
|
250 |
+
allowedHtmlElements: allowedHTMLElements,
|
251 |
+
modificationTagName: MODIFICATIONS_TAG_NAME,
|
252 |
+
}) ?? getSystemPrompt();
|
253 |
+
|
254 |
+
if (files && contextOptimization) {
|
255 |
+
const codeContext = createFilesContext(files);
|
256 |
+
systemPrompt = `${systemPrompt}\n\n ${codeContext}`;
|
257 |
+
}
|
258 |
+
|
259 |
+
logger.info(`Sending llm call to ${provider.name} with model ${modelDetails.name}`);
|
260 |
+
|
261 |
+
if (isPromptCachingEnabled) {
|
262 |
+
const messages = [
|
263 |
+
{
|
264 |
+
role: 'system',
|
265 |
+
content: systemPrompt,
|
266 |
+
experimental_providerMetadata: {
|
267 |
+
anthropic: { cacheControl: { type: 'ephemeral' } },
|
268 |
+
},
|
269 |
+
},
|
270 |
+
...processedMessages,
|
271 |
+
] as CoreMessage[];
|
272 |
+
|
273 |
+
persistMessages(messages);
|
274 |
+
|
275 |
+
return _streamText({
|
276 |
+
model: provider.getModelInstance({
|
277 |
+
model: modelDetails.name,
|
278 |
+
serverEnv,
|
279 |
+
apiKeys,
|
280 |
+
providerSettings,
|
281 |
+
}),
|
282 |
+
maxTokens: dynamicMaxTokens,
|
283 |
+
messages,
|
284 |
+
...options,
|
285 |
+
});
|
286 |
+
}
|
287 |
+
|
288 |
+
return _streamText({
|
289 |
+
model: provider.getModelInstance({
|
290 |
+
model: modelDetails.name,
|
291 |
+
serverEnv,
|
292 |
+
apiKeys,
|
293 |
+
providerSettings,
|
294 |
+
}),
|
295 |
+
system: systemPrompt,
|
296 |
+
maxTokens: dynamicMaxTokens,
|
297 |
+
messages: convertToCoreMessages(processedMessages as any),
|
298 |
+
...options,
|
299 |
+
});
|
300 |
+
}
|
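An illustrative input/output pair for the simplifyBoltActions helper above, which shrinks older assistant messages when context optimization is enabled; the file path and content are made up for the example.

import { simplifyBoltActions } from '~/lib/.server/llm/stream-text';

// The file body inside a boltAction tag is replaced with an ellipsis
// placeholder; the surrounding tags are kept intact.
const before = '<boltAction type="file" filePath="src/index.ts">console.log("hi");</boltAction>';
const after = simplifyBoltActions(before);
// `after` keeps both tags, but the body is now just "..." on its own line.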
app/lib/api/cookies.ts
ADDED
@@ -0,0 +1,33 @@
|
1 |
+
export function parseCookies(cookieHeader: string | null) {
|
2 |
+
const cookies: Record<string, string> = {};
|
3 |
+
|
4 |
+
if (!cookieHeader) {
|
5 |
+
return cookies;
|
6 |
+
}
|
7 |
+
|
8 |
+
// Split the cookie string by semicolons and spaces
|
9 |
+
const items = cookieHeader.split(';').map((cookie) => cookie.trim());
|
10 |
+
|
11 |
+
items.forEach((item) => {
|
12 |
+
const [name, ...rest] = item.split('=');
|
13 |
+
|
14 |
+
if (name && rest.length > 0) {
|
15 |
+
// Decode the name and value, and join value parts in case it contains '='
|
16 |
+
const decodedName = decodeURIComponent(name.trim());
|
17 |
+
const decodedValue = decodeURIComponent(rest.join('=').trim());
|
18 |
+
cookies[decodedName] = decodedValue;
|
19 |
+
}
|
20 |
+
});
|
21 |
+
|
22 |
+
return cookies;
|
23 |
+
}
|
24 |
+
|
25 |
+
export function getApiKeysFromCookie(cookieHeader: string | null): Record<string, string> {
|
26 |
+
const cookies = parseCookies(cookieHeader);
|
27 |
+
return cookies.apiKeys ? JSON.parse(cookies.apiKeys) : {};
|
28 |
+
}
|
29 |
+
|
30 |
+
export function getProviderSettingsFromCookie(cookieHeader: string | null): Record<string, any> {
|
31 |
+
const cookies = parseCookies(cookieHeader);
|
32 |
+
return cookies.providers ? JSON.parse(cookies.providers) : {};
|
33 |
+
}
|
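A minimal sketch of how the new cookie helpers above can be used inside a Remix action; the route and response shape are assumptions for illustration, not part of this commit.

import { getApiKeysFromCookie, getProviderSettingsFromCookie } from '~/lib/api/cookies';

// Assumed usage: pull per-user API keys and provider settings out of the
// request's Cookie header before calling an LLM provider.
export async function action({ request }: { request: Request }) {
  const cookieHeader = request.headers.get('Cookie');
  const apiKeys = getApiKeysFromCookie(cookieHeader);
  const providerSettings = getProviderSettingsFromCookie(cookieHeader);

  return new Response(JSON.stringify({ providers: Object.keys(apiKeys), settings: providerSettings }), {
    headers: { 'Content-Type': 'application/json' },
  });
}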
app/lib/hooks/useEditChatDescription.ts
CHANGED
@@ -3,10 +3,10 @@ import { useCallback, useEffect, useState } from 'react';
|
|
3 |
import { toast } from 'react-toastify';
|
4 |
import {
|
5 |
chatId as chatIdStore,
|
6 |
-
description as descriptionStore,
|
7 |
db,
|
8 |
-
|
9 |
getMessages,
|
|
|
10 |
} from '~/lib/persistence';
|
11 |
|
12 |
interface EditChatDescriptionOptions {
|
|
|
3 |
import { toast } from 'react-toastify';
|
4 |
import {
|
5 |
chatId as chatIdStore,
|
|
|
6 |
db,
|
7 |
+
description as descriptionStore,
|
8 |
getMessages,
|
9 |
+
updateChatDescription,
|
10 |
} from '~/lib/persistence';
|
11 |
|
12 |
interface EditChatDescriptionOptions {
|
app/lib/hooks/useGit.ts
CHANGED
@@ -1,325 +1,325 @@
(The whole file is deleted and re-added in this commit; both sides of the diff carry the same 325 lines, shown once below.)

import type { WebContainer } from '@webcontainer/api';
import { useCallback, useEffect, useRef, useState, type MutableRefObject } from 'react';
import { webcontainer as webcontainerPromise } from '~/lib/webcontainer';
import git, { type GitAuth, type PromiseFsClient } from 'isomorphic-git';
import http from 'isomorphic-git/http/web';
import Cookies from 'js-cookie';
import { toast } from 'react-toastify';

const lookupSavedPassword = (url: string) => {
  const domain = url.split('/')[2];
  const gitCreds = Cookies.get(`git:${domain}`);

  if (!gitCreds) {
    return null;
  }

  try {
    const { username, password } = JSON.parse(gitCreds || '{}');
    return { username, password };
  } catch (error) {
    console.log(`Failed to parse Git Cookie ${error}`);
    return null;
  }
};

const saveGitAuth = (url: string, auth: GitAuth) => {
  const domain = url.split('/')[2];
  Cookies.set(`git:${domain}`, JSON.stringify(auth));
};

export function useGit() {
  const [ready, setReady] = useState(false);
  const [webcontainer, setWebcontainer] = useState<WebContainer>();
  const [fs, setFs] = useState<PromiseFsClient>();
  const fileData = useRef<Record<string, { data: any; encoding?: string }>>({});
  useEffect(() => {
    webcontainerPromise.then((container) => {
      fileData.current = {};
      setWebcontainer(container);
      setFs(getFs(container, fileData));
      setReady(true);
    });
  }, []);

  const gitClone = useCallback(
    async (url: string) => {
      if (!webcontainer || !fs || !ready) {
        throw 'Webcontainer not initialized';
      }

      fileData.current = {};

      const headers: {
        [x: string]: string;
      } = {
        'User-Agent': 'bolt.diy',
      };

      const auth = lookupSavedPassword(url);

      if (auth) {
        headers.Authorization = `Basic ${Buffer.from(`${auth.username}:${auth.password}`).toString('base64')}`;
      }

      try {
        await git.clone({
          fs,
          http,
          dir: webcontainer.workdir,
          url,
          depth: 1,
          singleBranch: true,
          corsProxy: '/api/git-proxy',
          headers,

          onAuth: (url) => {
            let auth = lookupSavedPassword(url);

            if (auth) {
              return auth;
            }

            if (confirm('This repo is password protected. Ready to enter a username & password?')) {
              auth = {
                username: prompt('Enter username'),
                password: prompt('Enter password'),
              };
              return auth;
            } else {
              return { cancel: true };
            }
          },
          onAuthFailure: (url, _auth) => {
            toast.error(`Error Authenticating with ${url.split('/')[2]}`);
          },
          onAuthSuccess: (url, auth) => {
            saveGitAuth(url, auth);
          },
        });

        const data: Record<string, { data: any; encoding?: string }> = {};

        for (const [key, value] of Object.entries(fileData.current)) {
          data[key] = value;
        }

        return { workdir: webcontainer.workdir, data };
      } catch (error) {
        console.error('Git clone error:', error);
        throw error;
      }
    },
    [webcontainer, fs, ready],
  );

  return { ready, gitClone };
}

const getFs = (
  webcontainer: WebContainer,
  record: MutableRefObject<Record<string, { data: any; encoding?: string }>>,
) => ({
  promises: {
    readFile: async (path: string, options: any) => {
      const encoding = options?.encoding;
      const relativePath = pathUtils.relative(webcontainer.workdir, path);

      try {
        const result = await webcontainer.fs.readFile(relativePath, encoding);

        return result;
      } catch (error) {
        throw error;
      }
    },
    writeFile: async (path: string, data: any, options: any) => {
      const encoding = options.encoding;
      const relativePath = pathUtils.relative(webcontainer.workdir, path);

      if (record.current) {
        record.current[relativePath] = { data, encoding };
      }

      try {
        const result = await webcontainer.fs.writeFile(relativePath, data, { ...options, encoding });

        return result;
      } catch (error) {
        throw error;
      }
    },
    mkdir: async (path: string, options: any) => {
      const relativePath = pathUtils.relative(webcontainer.workdir, path);

      try {
        const result = await webcontainer.fs.mkdir(relativePath, { ...options, recursive: true });

        return result;
      } catch (error) {
        throw error;
      }
    },
    readdir: async (path: string, options: any) => {
      const relativePath = pathUtils.relative(webcontainer.workdir, path);

      try {
        const result = await webcontainer.fs.readdir(relativePath, options);

        return result;
      } catch (error) {
        throw error;
      }
    },
    rm: async (path: string, options: any) => {
      const relativePath = pathUtils.relative(webcontainer.workdir, path);

      try {
        const result = await webcontainer.fs.rm(relativePath, { ...(options || {}) });

        return result;
      } catch (error) {
        throw error;
      }
    },
    rmdir: async (path: string, options: any) => {
      const relativePath = pathUtils.relative(webcontainer.workdir, path);

      try {
        const result = await webcontainer.fs.rm(relativePath, { recursive: true, ...options });

        return result;
      } catch (error) {
        throw error;
      }
    },
    unlink: async (path: string) => {
      const relativePath = pathUtils.relative(webcontainer.workdir, path);

      try {
        return await webcontainer.fs.rm(relativePath, { recursive: false });
      } catch (error) {
        throw error;
      }
    },
    stat: async (path: string) => {
      try {
        const relativePath = pathUtils.relative(webcontainer.workdir, path);
        const resp = await webcontainer.fs.readdir(pathUtils.dirname(relativePath), { withFileTypes: true });
        const name = pathUtils.basename(relativePath);
        const fileInfo = resp.find((x) => x.name == name);

        if (!fileInfo) {
          throw new Error(`ENOENT: no such file or directory, stat '${path}'`);
        }

        return {
          isFile: () => fileInfo.isFile(),
          isDirectory: () => fileInfo.isDirectory(),
          isSymbolicLink: () => false,
          size: 1,
          mode: 0o666, // Default permissions
          mtimeMs: Date.now(),
          uid: 1000,
          gid: 1000,
        };
      } catch (error: any) {
        console.log(error?.message);

        const err = new Error(`ENOENT: no such file or directory, stat '${path}'`) as NodeJS.ErrnoException;
        err.code = 'ENOENT';
        err.errno = -2;
        err.syscall = 'stat';
        err.path = path;
        throw err;
      }
    },
    lstat: async (path: string) => {
      return await getFs(webcontainer, record).promises.stat(path);
    },
    readlink: async (path: string) => {
      throw new Error(`EINVAL: invalid argument, readlink '${path}'`);
    },
    symlink: async (target: string, path: string) => {
      /*
       * Since WebContainer doesn't support symlinks,
       * we'll throw a "operation not supported" error
       */
      throw new Error(`EPERM: operation not permitted, symlink '${target}' -> '${path}'`);
    },

    chmod: async (_path: string, _mode: number) => {
      /*
       * WebContainer doesn't support changing permissions,
       * but we can pretend it succeeded for compatibility
       */
      return await Promise.resolve();
    },
  },
});

const pathUtils = {
  dirname: (path: string) => {
    // Handle empty or just filename cases
    if (!path || !path.includes('/')) {
      return '.';
    }

    // Remove trailing slashes
    path = path.replace(/\/+$/, '');

    // Get directory part
    return path.split('/').slice(0, -1).join('/') || '/';
  },

  basename: (path: string, ext?: string) => {
    // Remove trailing slashes
    path = path.replace(/\/+$/, '');

    // Get the last part of the path
    const base = path.split('/').pop() || '';

    // If extension is provided, remove it from the result
    if (ext && base.endsWith(ext)) {
      return base.slice(0, -ext.length);
    }

    return base;
  },
  relative: (from: string, to: string): string => {
    // Handle empty inputs
    if (!from || !to) {
      return '.';
    }

    // Normalize paths by removing trailing slashes and splitting
    const normalizePathParts = (p: string) => p.replace(/\/+$/, '').split('/').filter(Boolean);

    const fromParts = normalizePathParts(from);
    const toParts = normalizePathParts(to);

    // Find common parts at the start of both paths
    let commonLength = 0;
    const minLength = Math.min(fromParts.length, toParts.length);

    for (let i = 0; i < minLength; i++) {
      if (fromParts[i] !== toParts[i]) {
        break;
      }

      commonLength++;
    }

    // Calculate the number of "../" needed
    const upCount = fromParts.length - commonLength;

    // Get the remaining path parts we need to append
    const remainingPath = toParts.slice(commonLength);

    // Construct the relative path
    const relativeParts = [...Array(upCount).fill('..'), ...remainingPath];

    // Handle empty result case
    return relativeParts.length === 0 ? '.' : relativeParts.join('/');
  },
};
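A small sketch of how the hook is meant to be consumed; the component and the repository URL are made up for illustration, only useGit and the { workdir, data } return shape come from the file above.

// Hypothetical consumer of useGit (not part of this commit).
import { useGit } from '~/lib/hooks/useGit';

function CloneButton() {
  const { ready, gitClone } = useGit();

  const handleClick = async () => {
    if (!ready) {
      return; // WebContainer is still booting
    }

    // gitClone resolves with the workdir plus a map of every file written during the clone.
    const { workdir, data } = await gitClone('https://github.com/user/repo');
    console.log(workdir, Object.keys(data).length);
  };

  return <button onClick={handleClick}>Clone</button>;
}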
app/lib/modules/llm/base-provider.ts
CHANGED
@@ -46,7 +46,7 @@ export abstract class BaseProvider implements ProviderInfo {
 
     const apiTokenKey = this.config.apiTokenKey || defaultApiTokenKey;
     const apiKey =
-      apiKeys?.[this.name] || serverEnv?.[apiTokenKey] || process?.env?.[apiTokenKey] || manager.env?.[
+      apiKeys?.[this.name] || serverEnv?.[apiTokenKey] || process?.env?.[apiTokenKey] || manager.env?.[apiTokenKey];
 
     return {
       baseUrl,
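For context, the fallback chain that this one-line change completes can be read directly off the expression above; the concrete provider/key names below are just examples taken from elsewhere in this commit.

// Key resolution order after this change (example names, not new behaviour beyond the line above):
// 1. per-user key from cookies:      apiKeys?.['Anthropic']
// 2. request server environment:     serverEnv?.['ANTHROPIC_API_KEY']
// 3. process environment:            process?.env?.['ANTHROPIC_API_KEY']
// 4. LLMManager environment:         manager.env?.['ANTHROPIC_API_KEY']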
app/lib/modules/llm/manager.ts
CHANGED
@@ -83,7 +83,7 @@ export class LLMManager {
 
     let enabledProviders = Array.from(this._providers.values()).map((p) => p.name);
 
-    if (providerSettings) {
+    if (providerSettings && Object.keys(providerSettings).length > 0) {
       enabledProviders = enabledProviders.filter((p) => providerSettings[p].enabled);
     }
 
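Why the extra Object.keys guard matters, sketched with an assumed input value:

// Behaviour sketch (the value is assumed for illustration):
const providerSettings = {}; // e.g. the user never set any provider preferences

// Old check: `if (providerSettings)` is true even for {}, so the filter runs and
// `providerSettings['Anthropic'].enabled` throws on the missing entry.
// New check: Object.keys({}).length === 0, so the filter is skipped and every
// registered provider stays enabled by default.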
app/lib/modules/llm/providers/amazon-bedrock.ts
ADDED
@@ -0,0 +1,113 @@
+import { BaseProvider } from '~/lib/modules/llm/base-provider';
+import type { ModelInfo } from '~/lib/modules/llm/types';
+import type { LanguageModelV1 } from 'ai';
+import type { IProviderSetting } from '~/types/model';
+import { createAmazonBedrock } from '@ai-sdk/amazon-bedrock';
+
+interface AWSBedRockConfig {
+  region: string;
+  accessKeyId: string;
+  secretAccessKey: string;
+  sessionToken?: string;
+}
+
+export default class AmazonBedrockProvider extends BaseProvider {
+  name = 'AmazonBedrock';
+  getApiKeyLink = 'https://console.aws.amazon.com/iam/home';
+
+  config = {
+    apiTokenKey: 'AWS_BEDROCK_CONFIG',
+  };
+
+  staticModels: ModelInfo[] = [
+    {
+      name: 'anthropic.claude-3-5-sonnet-20240620-v1:0',
+      label: 'Claude 3.5 Sonnet (Bedrock)',
+      provider: 'AmazonBedrock',
+      maxTokenAllowed: 4096,
+    },
+    {
+      name: 'anthropic.claude-3-sonnet-20240229-v1:0',
+      label: 'Claude 3 Sonnet (Bedrock)',
+      provider: 'AmazonBedrock',
+      maxTokenAllowed: 4096,
+    },
+    {
+      name: 'anthropic.claude-3-haiku-20240307-v1:0',
+      label: 'Claude 3 Haiku (Bedrock)',
+      provider: 'AmazonBedrock',
+      maxTokenAllowed: 4096,
+    },
+    {
+      name: 'amazon.nova-pro-v1:0',
+      label: 'Amazon Nova Pro (Bedrock)',
+      provider: 'AmazonBedrock',
+      maxTokenAllowed: 5120,
+    },
+    {
+      name: 'amazon.nova-lite-v1:0',
+      label: 'Amazon Nova Lite (Bedrock)',
+      provider: 'AmazonBedrock',
+      maxTokenAllowed: 5120,
+    },
+    {
+      name: 'mistral.mistral-large-2402-v1:0',
+      label: 'Mistral Large 24.02 (Bedrock)',
+      provider: 'AmazonBedrock',
+      maxTokenAllowed: 8192,
+    },
+  ];
+
+  private _parseAndValidateConfig(apiKey: string): AWSBedRockConfig {
+    let parsedConfig: AWSBedRockConfig;
+
+    try {
+      parsedConfig = JSON.parse(apiKey);
+    } catch {
+      throw new Error(
+        'Invalid AWS Bedrock configuration format. Please provide a valid JSON string containing region, accessKeyId, and secretAccessKey.',
+      );
+    }
+
+    const { region, accessKeyId, secretAccessKey, sessionToken } = parsedConfig;
+
+    if (!region || !accessKeyId || !secretAccessKey) {
+      throw new Error(
+        'Missing required AWS credentials. Configuration must include region, accessKeyId, and secretAccessKey.',
+      );
+    }
+
+    return {
+      region,
+      accessKeyId,
+      secretAccessKey,
+      ...(sessionToken && { sessionToken }),
+    };
+  }
+
+  getModelInstance(options: {
+    model: string;
+    serverEnv: any;
+    apiKeys?: Record<string, string>;
+    providerSettings?: Record<string, IProviderSetting>;
+  }): LanguageModelV1 {
+    const { model, serverEnv, apiKeys, providerSettings } = options;
+
+    const { apiKey } = this.getProviderBaseUrlAndKey({
+      apiKeys,
+      providerSettings: providerSettings?.[this.name],
+      serverEnv: serverEnv as any,
+      defaultBaseUrlKey: '',
+      defaultApiTokenKey: 'AWS_BEDROCK_CONFIG',
+    });
+
+    if (!apiKey) {
+      throw new Error(`Missing API key for ${this.name} provider`);
+    }
+
+    const config = this._parseAndValidateConfig(apiKey);
+    const bedrock = createAmazonBedrock(config);
+
+    return bedrock(model);
+  }
+}
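The "API key" for this provider is really a JSON blob that _parseAndValidateConfig unpacks. A sketch of the expected shape, with placeholder credential values:

// Placeholder values; only the field names and which ones are required come from the code above.
const exampleBedrockConfig = JSON.stringify({
  region: 'us-east-1',            // required
  accessKeyId: 'AKIA...',         // required
  secretAccessKey: 'example-key', // required
  sessionToken: 'optional-token', // optional; only kept in the parsed config when present
});
// This string is what goes into the AmazonBedrock key slot (or the AWS_BEDROCK_CONFIG variable).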
app/lib/modules/llm/providers/anthropic.ts
CHANGED
@@ -1,58 +1,58 @@
(The entire file is replaced in this commit; the re-added version reads as follows.)

import { BaseProvider } from '~/lib/modules/llm/base-provider';
import type { ModelInfo } from '~/lib/modules/llm/types';
import type { LanguageModelV1 } from 'ai';
import type { IProviderSetting } from '~/types/model';
import { createAnthropic } from '@ai-sdk/anthropic';

export default class AnthropicProvider extends BaseProvider {
  name = 'Anthropic';
  getApiKeyLink = 'https://console.anthropic.com/settings/keys';

  config = {
    apiTokenKey: 'ANTHROPIC_API_KEY',
  };

  staticModels: ModelInfo[] = [
    {
      name: 'claude-3-5-sonnet-latest',
      label: 'Claude 3.5 Sonnet (new)',
      provider: 'Anthropic',
      maxTokenAllowed: 8000,
    },
    {
      name: 'claude-3-5-sonnet-20240620',
      label: 'Claude 3.5 Sonnet (old)',
      provider: 'Anthropic',
      maxTokenAllowed: 8000,
    },
    {
      name: 'claude-3-5-haiku-latest',
      label: 'Claude 3.5 Haiku (new)',
      provider: 'Anthropic',
      maxTokenAllowed: 8000,
    },
    { name: 'claude-3-opus-latest', label: 'Claude 3 Opus', provider: 'Anthropic', maxTokenAllowed: 8000 },
    { name: 'claude-3-sonnet-20240229', label: 'Claude 3 Sonnet', provider: 'Anthropic', maxTokenAllowed: 8000 },
    { name: 'claude-3-haiku-20240307', label: 'Claude 3 Haiku', provider: 'Anthropic', maxTokenAllowed: 8000 },
  ];
  getModelInstance: (options: {
    model: string;
    serverEnv: Env;
    apiKeys?: Record<string, string>;
    providerSettings?: Record<string, IProviderSetting>;
  }) => LanguageModelV1 = (options) => {
    const { apiKeys, providerSettings, serverEnv, model } = options;
    const { apiKey } = this.getProviderBaseUrlAndKey({
      apiKeys,
      providerSettings,
      serverEnv: serverEnv as any,
      defaultBaseUrlKey: '',
      defaultApiTokenKey: 'ANTHROPIC_API_KEY',
    });
    const anthropic = createAnthropic({
      apiKey,
    });

    return anthropic(model, { cacheControl: true });
  };
}
app/lib/modules/llm/providers/github.ts
ADDED
@@ -0,0 +1,53 @@
+import { BaseProvider } from '~/lib/modules/llm/base-provider';
+import type { ModelInfo } from '~/lib/modules/llm/types';
+import type { IProviderSetting } from '~/types/model';
+import type { LanguageModelV1 } from 'ai';
+import { createOpenAI } from '@ai-sdk/openai';
+
+export default class GithubProvider extends BaseProvider {
+  name = 'Github';
+  getApiKeyLink = 'https://github.com/settings/personal-access-tokens';
+
+  config = {
+    apiTokenKey: 'GITHUB_API_KEY',
+  };
+
+  // find more in https://github.com/marketplace?type=models
+  staticModels: ModelInfo[] = [
+    { name: 'gpt-4o', label: 'GPT-4o', provider: 'Github', maxTokenAllowed: 8000 },
+    { name: 'o1', label: 'o1-preview', provider: 'Github', maxTokenAllowed: 100000 },
+    { name: 'o1-mini', label: 'o1-mini', provider: 'Github', maxTokenAllowed: 8000 },
+    { name: 'gpt-4o-mini', label: 'GPT-4o Mini', provider: 'Github', maxTokenAllowed: 8000 },
+    { name: 'gpt-4-turbo', label: 'GPT-4 Turbo', provider: 'Github', maxTokenAllowed: 8000 },
+    { name: 'gpt-4', label: 'GPT-4', provider: 'Github', maxTokenAllowed: 8000 },
+    { name: 'gpt-3.5-turbo', label: 'GPT-3.5 Turbo', provider: 'Github', maxTokenAllowed: 8000 },
+  ];
+
+  getModelInstance(options: {
+    model: string;
+    serverEnv: Env;
+    apiKeys?: Record<string, string>;
+    providerSettings?: Record<string, IProviderSetting>;
+  }): LanguageModelV1 {
+    const { model, serverEnv, apiKeys, providerSettings } = options;
+
+    const { apiKey } = this.getProviderBaseUrlAndKey({
+      apiKeys,
+      providerSettings: providerSettings?.[this.name],
+      serverEnv: serverEnv as any,
+      defaultBaseUrlKey: '',
+      defaultApiTokenKey: 'GITHUB_API_KEY',
+    });
+
+    if (!apiKey) {
+      throw new Error(`Missing API key for ${this.name} provider`);
+    }
+
+    const openai = createOpenAI({
+      baseURL: 'https://models.inference.ai.azure.com',
+      apiKey,
+    });
+
+    return openai(model);
+  }
+}
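The design choice worth noting is that this provider does not introduce a new client: it points the existing OpenAI-compatible client at the GitHub Models inference endpoint. A hypothetical call, with a placeholder token:

// Illustration only; the token value is fake and constructing the provider directly like this is an assumption.
import GithubProvider from '~/lib/modules/llm/providers/github';

const provider = new GithubProvider();
const model = provider.getModelInstance({
  model: 'gpt-4o-mini',
  serverEnv: {} as any,
  apiKeys: { Github: 'github_pat_...' },
});
// `model` is a LanguageModelV1 backed by the OpenAI-compatible GitHub Models endpoint above.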
app/lib/modules/llm/providers/lmstudio.ts
CHANGED
@@ -3,6 +3,7 @@ import type { ModelInfo } from '~/lib/modules/llm/types';
 import type { IProviderSetting } from '~/types/model';
 import { createOpenAI } from '@ai-sdk/openai';
 import type { LanguageModelV1 } from 'ai';
+import { logger } from '~/utils/logger';
 
 export default class LMStudioProvider extends BaseProvider {
   name = 'LMStudio';
@@ -22,7 +23,7 @@ export default class LMStudioProvider extends BaseProvider {
     settings?: IProviderSetting,
     serverEnv: Record<string, string> = {},
   ): Promise<ModelInfo[]> {
-
+    let { baseUrl } = this.getProviderBaseUrlAndKey({
       apiKeys,
       providerSettings: settings,
       serverEnv,
@@ -31,7 +32,18 @@ export default class LMStudioProvider extends BaseProvider {
     });
 
     if (!baseUrl) {
-
+      throw new Error('No baseUrl found for LMStudio provider');
+    }
+
+    if (typeof window === 'undefined') {
+      /*
+       * Running in Server
+       * Backend: Check if we're running in Docker
+       */
+      const isDocker = process.env.RUNNING_IN_DOCKER === 'true';
+
+      baseUrl = isDocker ? baseUrl.replace('localhost', 'host.docker.internal') : baseUrl;
+      baseUrl = isDocker ? baseUrl.replace('127.0.0.1', 'host.docker.internal') : baseUrl;
     }
 
     const response = await fetch(`${baseUrl}/v1/models`);
@@ -51,13 +63,26 @@ export default class LMStudioProvider extends BaseProvider {
     providerSettings?: Record<string, IProviderSetting>;
   }) => LanguageModelV1 = (options) => {
     const { apiKeys, providerSettings, serverEnv, model } = options;
-
+    let { baseUrl } = this.getProviderBaseUrlAndKey({
       apiKeys,
-      providerSettings,
+      providerSettings: providerSettings?.[this.name],
       serverEnv: serverEnv as any,
-      defaultBaseUrlKey: '
+      defaultBaseUrlKey: 'LMSTUDIO_API_BASE_URL',
       defaultApiTokenKey: '',
     });
+
+    if (!baseUrl) {
+      throw new Error('No baseUrl found for LMStudio provider');
+    }
+
+    if (typeof window === 'undefined') {
+      const isDocker = process.env.RUNNING_IN_DOCKER === 'true';
+      baseUrl = isDocker ? baseUrl.replace('localhost', 'host.docker.internal') : baseUrl;
+      baseUrl = isDocker ? baseUrl.replace('127.0.0.1', 'host.docker.internal') : baseUrl;
+    }
+
+    logger.debug('LMStudio Base Url used: ', baseUrl);
+
     const lmstudio = createOpenAI({
       baseUrl: `${baseUrl}/v1`,
       apiKey: '',
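The effect of the new Docker branch, stated as a small before/after sketch with assumed values:

// Assuming the server process runs with RUNNING_IN_DOCKER=true:
//   'http://localhost:1234'  -> 'http://host.docker.internal:1234'
//   'http://127.0.0.1:1234'  -> 'http://host.docker.internal:1234'
// In the browser (typeof window !== 'undefined' is false only on the server),
// the configured baseUrl is left untouched.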
app/lib/modules/llm/providers/ollama.ts
CHANGED
@@ -3,6 +3,7 @@ import type { ModelInfo } from '~/lib/modules/llm/types';
 import type { IProviderSetting } from '~/types/model';
 import type { LanguageModelV1 } from 'ai';
 import { ollama } from 'ollama-ai-provider';
+import { logger } from '~/utils/logger';
 
 interface OllamaModelDetails {
   parent_model: string;
@@ -45,7 +46,7 @@ export default class OllamaProvider extends BaseProvider {
     settings?: IProviderSetting,
     serverEnv: Record<string, string> = {},
   ): Promise<ModelInfo[]> {
-
+    let { baseUrl } = this.getProviderBaseUrlAndKey({
       apiKeys,
       providerSettings: settings,
       serverEnv,
@@ -54,7 +55,18 @@ export default class OllamaProvider extends BaseProvider {
     });
 
     if (!baseUrl) {
-
+      throw new Error('No baseUrl found for OLLAMA provider');
+    }
+
+    if (typeof window === 'undefined') {
+      /*
+       * Running in Server
+       * Backend: Check if we're running in Docker
+       */
+      const isDocker = process.env.RUNNING_IN_DOCKER === 'true';
+
+      baseUrl = isDocker ? baseUrl.replace('localhost', 'host.docker.internal') : baseUrl;
+      baseUrl = isDocker ? baseUrl.replace('127.0.0.1', 'host.docker.internal') : baseUrl;
     }
 
     const response = await fetch(`${baseUrl}/api/tags`);
@@ -78,18 +90,23 @@ export default class OllamaProvider extends BaseProvider {
     const { apiKeys, providerSettings, serverEnv, model } = options;
     let { baseUrl } = this.getProviderBaseUrlAndKey({
       apiKeys,
-      providerSettings,
+      providerSettings: providerSettings?.[this.name],
       serverEnv: serverEnv as any,
       defaultBaseUrlKey: 'OLLAMA_API_BASE_URL',
       defaultApiTokenKey: '',
     });
 
     // Backend: Check if we're running in Docker
-
+    if (!baseUrl) {
+      throw new Error('No baseUrl found for OLLAMA provider');
+    }
 
+    const isDocker = process.env.RUNNING_IN_DOCKER === 'true';
     baseUrl = isDocker ? baseUrl.replace('localhost', 'host.docker.internal') : baseUrl;
     baseUrl = isDocker ? baseUrl.replace('127.0.0.1', 'host.docker.internal') : baseUrl;
 
+    logger.debug('Ollama Base Url used: ', baseUrl);
+
     const ollamaInstance = ollama(model, {
       numCtx: DEFAULT_NUM_CTX,
     }) as LanguageModelV1 & { config: any };
app/lib/modules/llm/registry.ts
CHANGED
@@ -14,6 +14,8 @@ import PerplexityProvider from './providers/perplexity';
 import TogetherProvider from './providers/together';
 import XAIProvider from './providers/xai';
 import HyperbolicProvider from './providers/hyperbolic';
+import AmazonBedrockProvider from './providers/amazon-bedrock';
+import GithubProvider from './providers/github';
 
 export {
   AnthropicProvider,
@@ -32,4 +34,6 @@ export {
   XAIProvider,
   TogetherProvider,
   LMStudioProvider,
+  AmazonBedrockProvider,
+  GithubProvider,
 };
app/lib/stores/previews.ts
CHANGED
@@ -1,27 +1,192 @@
 import type { WebContainer } from '@webcontainer/api';
 import { atom } from 'nanostores';
 
+// Extend Window interface to include our custom property
+declare global {
+  interface Window {
+    _tabId?: string;
+  }
+}
+
 export interface PreviewInfo {
   port: number;
   ready: boolean;
   baseUrl: string;
 }
 
+// Create a broadcast channel for preview updates
+const PREVIEW_CHANNEL = 'preview-updates';
+
 export class PreviewsStore {
   #availablePreviews = new Map<number, PreviewInfo>();
   #webcontainer: Promise<WebContainer>;
+  #broadcastChannel: BroadcastChannel;
+  #lastUpdate = new Map<string, number>();
+  #watchedFiles = new Set<string>();
+  #refreshTimeouts = new Map<string, NodeJS.Timeout>();
+  #REFRESH_DELAY = 300;
+  #storageChannel: BroadcastChannel;
 
   previews = atom<PreviewInfo[]>([]);
 
   constructor(webcontainerPromise: Promise<WebContainer>) {
     this.#webcontainer = webcontainerPromise;
+    this.#broadcastChannel = new BroadcastChannel(PREVIEW_CHANNEL);
+    this.#storageChannel = new BroadcastChannel('storage-sync-channel');
+
+    // Listen for preview updates from other tabs
+    this.#broadcastChannel.onmessage = (event) => {
+      const { type, previewId } = event.data;
+
+      if (type === 'file-change') {
+        const timestamp = event.data.timestamp;
+        const lastUpdate = this.#lastUpdate.get(previewId) || 0;
+
+        if (timestamp > lastUpdate) {
+          this.#lastUpdate.set(previewId, timestamp);
+          this.refreshPreview(previewId);
+        }
+      }
+    };
+
+    // Listen for storage sync messages
+    this.#storageChannel.onmessage = (event) => {
+      const { storage, source } = event.data;
+
+      if (storage && source !== this._getTabId()) {
+        this._syncStorage(storage);
+      }
+    };
+
+    // Override localStorage setItem to catch all changes
+    if (typeof window !== 'undefined') {
+      const originalSetItem = localStorage.setItem;
+
+      localStorage.setItem = (...args) => {
+        originalSetItem.apply(localStorage, args);
+        this._broadcastStorageSync();
+      };
+    }
 
     this.#init();
   }
 
+  // Generate a unique ID for this tab
+  private _getTabId(): string {
+    if (typeof window !== 'undefined') {
+      if (!window._tabId) {
+        window._tabId = Math.random().toString(36).substring(2, 15);
+      }
+
+      return window._tabId;
+    }
+
+    return '';
+  }
+
+  // Sync storage data between tabs
+  private _syncStorage(storage: Record<string, string>) {
+    if (typeof window !== 'undefined') {
+      Object.entries(storage).forEach(([key, value]) => {
+        try {
+          const originalSetItem = Object.getPrototypeOf(localStorage).setItem;
+          originalSetItem.call(localStorage, key, value);
+        } catch (error) {
+          console.error('[Preview] Error syncing storage:', error);
+        }
+      });
+
+      // Force a refresh after syncing storage
+      const previews = this.previews.get();
+      previews.forEach((preview) => {
+        const previewId = this.getPreviewId(preview.baseUrl);
+
+        if (previewId) {
+          this.refreshPreview(previewId);
+        }
+      });
+
+      // Reload the page content
+      if (typeof window !== 'undefined' && window.location) {
+        const iframe = document.querySelector('iframe');
+
+        if (iframe) {
+          iframe.src = iframe.src;
+        }
+      }
+    }
+  }
+
+  // Broadcast storage state to other tabs
+  private _broadcastStorageSync() {
+    if (typeof window !== 'undefined') {
+      const storage: Record<string, string> = {};
+
+      for (let i = 0; i < localStorage.length; i++) {
+        const key = localStorage.key(i);
+
+        if (key) {
+          storage[key] = localStorage.getItem(key) || '';
+        }
+      }
+
+      this.#storageChannel.postMessage({
+        type: 'storage-sync',
+        storage,
+        source: this._getTabId(),
+        timestamp: Date.now(),
+      });
+    }
+  }
+
   async #init() {
     const webcontainer = await this.#webcontainer;
 
+    // Listen for server ready events
+    webcontainer.on('server-ready', (port, url) => {
+      console.log('[Preview] Server ready on port:', port, url);
+      this.broadcastUpdate(url);
+
+      // Initial storage sync when preview is ready
+      this._broadcastStorageSync();
+    });
+
+    try {
+      // Watch for file changes
+      const watcher = await webcontainer.fs.watch('**/*', { persistent: true });
+
+      // Use the native watch events
+      (watcher as any).addEventListener('change', async () => {
+        const previews = this.previews.get();
+
+        for (const preview of previews) {
+          const previewId = this.getPreviewId(preview.baseUrl);
+
+          if (previewId) {
+            this.broadcastFileChange(previewId);
+          }
+        }
+      });
+
+      // Watch for DOM changes that might affect storage
+      if (typeof window !== 'undefined') {
+        const observer = new MutationObserver((_mutations) => {
+          // Broadcast storage changes when DOM changes
+          this._broadcastStorageSync();
+        });
+
+        observer.observe(document.body, {
+          childList: true,
+          subtree: true,
+          characterData: true,
+          attributes: true,
+        });
+      }
+    } catch (error) {
+      console.error('[Preview] Error setting up watchers:', error);
+    }
+
+    // Listen for port events
     webcontainer.on('port', (port, type, url) => {
       let previewInfo = this.#availablePreviews.get(port);
 
@@ -44,6 +209,101 @@ export class PreviewsStore {
       previewInfo.baseUrl = url;
 
       this.previews.set([...previews]);
+
+      if (type === 'open') {
+        this.broadcastUpdate(url);
+      }
+    });
+  }
+
+  // Helper to extract preview ID from URL
+  getPreviewId(url: string): string | null {
+    const match = url.match(/^https?:\/\/([^.]+)\.local-credentialless\.webcontainer-api\.io/);
+    return match ? match[1] : null;
+  }
+
+  // Broadcast state change to all tabs
+  broadcastStateChange(previewId: string) {
+    const timestamp = Date.now();
+    this.#lastUpdate.set(previewId, timestamp);
+
+    this.#broadcastChannel.postMessage({
+      type: 'state-change',
+      previewId,
+      timestamp,
     });
   }
+
+  // Broadcast file change to all tabs
+  broadcastFileChange(previewId: string) {
+    const timestamp = Date.now();
+    this.#lastUpdate.set(previewId, timestamp);
+
+    this.#broadcastChannel.postMessage({
+      type: 'file-change',
+      previewId,
+      timestamp,
+    });
+  }
+
+  // Broadcast update to all tabs
+  broadcastUpdate(url: string) {
+    const previewId = this.getPreviewId(url);
+
+    if (previewId) {
+      const timestamp = Date.now();
+      this.#lastUpdate.set(previewId, timestamp);
+
+      this.#broadcastChannel.postMessage({
+        type: 'file-change',
+        previewId,
+        timestamp,
+      });
+    }
+  }
+
+  // Method to refresh a specific preview
+  refreshPreview(previewId: string) {
+    // Clear any pending refresh for this preview
+    const existingTimeout = this.#refreshTimeouts.get(previewId);
+
+    if (existingTimeout) {
+      clearTimeout(existingTimeout);
+    }
+
+    // Set a new timeout for this refresh
+    const timeout = setTimeout(() => {
+      const previews = this.previews.get();
+      const preview = previews.find((p) => this.getPreviewId(p.baseUrl) === previewId);
+
+      if (preview) {
+        preview.ready = false;
+        this.previews.set([...previews]);
+
+        requestAnimationFrame(() => {
+          preview.ready = true;
+          this.previews.set([...previews]);
+        });
+      }
+
+      this.#refreshTimeouts.delete(previewId);
+    }, this.#REFRESH_DELAY);
+
+    this.#refreshTimeouts.set(previewId, timeout);
+  }
+}
+
+// Create a singleton instance
+let previewsStore: PreviewsStore | null = null;
+
+export function usePreviewStore() {
+  if (!previewsStore) {
+    /*
+     * Initialize with a Promise that resolves to WebContainer
+     * This should match how you're initializing WebContainer elsewhere
+     */
+    previewsStore = new PreviewsStore(Promise.resolve({} as WebContainer));
+  }
+
+  return previewsStore;
+}
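For readers following the cross-tab protocol above, these are the message shapes implied by the postMessage calls; the type aliases themselves are not part of the commit, only a reading of it.

// Message shapes reconstructed from the code above (aliases are illustrative):
type PreviewMessage = {
  type: 'file-change' | 'state-change';
  previewId: string;
  timestamp: number;
};

type StorageSyncMessage = {
  type: 'storage-sync';
  storage: Record<string, string>; // snapshot of localStorage
  source: string;                  // tab id of the sender, so a tab ignores its own messages
  timestamp: number;
};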
app/lib/stores/theme.ts
CHANGED
@@ -9,16 +9,16 @@ export function themeIsDark() {
   return themeStore.get() === 'dark';
 }
 
-export const DEFAULT_THEME = '
+export const DEFAULT_THEME = 'light';
 
 export const themeStore = atom<Theme>(initStore());
 
 function initStore() {
   if (!import.meta.env.SSR) {
-    const persistedTheme = localStorage.getItem(kTheme)
+    const persistedTheme = localStorage.getItem(kTheme) as Theme | undefined;
     const themeAttribute = document.querySelector('html')?.getAttribute('data-theme');
 
-    return persistedTheme ?? (themeAttribute as Theme);
+    return persistedTheme ?? (themeAttribute as Theme) ?? DEFAULT_THEME;
   }
 
   return DEFAULT_THEME;
app/lib/webcontainer/index.ts
CHANGED
@@ -24,6 +24,7 @@ if (!import.meta.env.SSR) {
   Promise.resolve()
     .then(() => {
       return WebContainer.boot({
+        coep: 'credentialless',
         workdirName: WORK_DIR_NAME,
         forwardPreviewErrors: true, // Enable error forwarding from iframes
       });
app/routes/api.chat.ts
CHANGED
@@ -1,171 +1,188 @@
 import { type ActionFunctionArgs } from '@remix-run/cloudflare';
 import { createDataStream } from 'ai';
 import { MAX_RESPONSE_SEGMENTS, MAX_TOKENS } from '~/lib/.server/llm/constants';
 import { CONTINUE_PROMPT } from '~/lib/common/prompts/prompts';
 import { streamText, type Messages, type StreamingOptions } from '~/lib/.server/llm/stream-text';
 import SwitchableStream from '~/lib/.server/llm/switchable-stream';
 import type { IProviderSetting } from '~/types/model';
 import { createScopedLogger } from '~/utils/logger';

 export async function action(args: ActionFunctionArgs) {
   return chatAction(args);
 }

 const logger = createScopedLogger('api.chat');

 function parseCookies(cookieHeader: string): Record<string, string> {
   const cookies: Record<string, string> = {};

   const items = cookieHeader.split(';').map((cookie) => cookie.trim());

   items.forEach((item) => {
     const [name, ...rest] = item.split('=');

     if (name && rest) {
       const decodedName = decodeURIComponent(name.trim());
       const decodedValue = decodeURIComponent(rest.join('=').trim());
       cookies[decodedName] = decodedValue;
     }
   });

   return cookies;
 }

 async function chatAction({ context, request }: ActionFunctionArgs) {
   const { messages, files, promptId, contextOptimization, isPromptCachingEnabled } = await request.json<{
     messages: Messages;
     files: any;
     promptId?: string;
     contextOptimization: boolean;
     isPromptCachingEnabled: boolean;
   }>();

   const cookieHeader = request.headers.get('Cookie');
   const apiKeys = JSON.parse(parseCookies(cookieHeader || '').apiKeys || '{}');
   const providerSettings: Record<string, IProviderSetting> = JSON.parse(
     parseCookies(cookieHeader || '').providers || '{}',
   );

   const stream = new SwitchableStream();

   const cumulativeUsage = {
     completionTokens: 0,
     promptTokens: 0,
     totalTokens: 0,
   };

   try {
     const options: StreamingOptions = {
       toolChoice: 'none',
       // eslint-disable-next-line @typescript-eslint/naming-convention
       onFinish: async ({ text: content, finishReason, usage, experimental_providerMetadata }) => {
         logger.debug('usage', JSON.stringify(usage));

         const cacheUsage = experimental_providerMetadata?.anthropic;
         console.debug({ cacheUsage });

         if (usage) {
           cumulativeUsage.completionTokens += Math.round(usage.completionTokens || 0);
           cumulativeUsage.promptTokens += Math.round(
             (usage.promptTokens || 0) +
               ((cacheUsage?.cacheCreationInputTokens as number) || 0) * 1.25 +
               ((cacheUsage?.cacheReadInputTokens as number) || 0) * 0.1,
           );
           cumulativeUsage.totalTokens = cumulativeUsage.completionTokens + cumulativeUsage.promptTokens;
         }

         if (finishReason !== 'length') {
           const encoder = new TextEncoder();
           const usageStream = createDataStream({
             async execute(dataStream) {
               dataStream.writeMessageAnnotation({
                 type: 'usage',
                 value: {
                   completionTokens: cumulativeUsage.completionTokens,
                   promptTokens: cumulativeUsage.promptTokens,
                   totalTokens: cumulativeUsage.totalTokens,
                 },
               });
             },
             onError: (error: any) => `Custom error: ${error.message}`,
           }).pipeThrough(
             new TransformStream({
               transform: (chunk, controller) => {
                 // Convert the string stream to a byte stream
                 const str = typeof chunk === 'string' ? chunk : JSON.stringify(chunk);
                 controller.enqueue(encoder.encode(str));
               },
             }),
           );
           await stream.switchSource(usageStream);
           await new Promise((resolve) => setTimeout(resolve, 0));
           stream.close();

           return;
         }

         if (stream.switches >= MAX_RESPONSE_SEGMENTS) {
           throw Error('Cannot continue message: Maximum segments reached');
         }

         const switchesLeft = MAX_RESPONSE_SEGMENTS - stream.switches;

         logger.info(`Reached max token limit (${MAX_TOKENS}): Continuing message (${switchesLeft} switches left)`);

         messages.push({ role: 'assistant', content });
         messages.push({ role: 'user', content: CONTINUE_PROMPT });

         const result = await streamText({
           messages,
           env: context.cloudflare.env,
           options,
           apiKeys,
           files,
           providerSettings,
           promptId,
           contextOptimization,
           isPromptCachingEnabled,
         });

         stream.switchSource(result.toDataStream());

         return;
       },
     };
-}
+    const totalMessageContent = messages.reduce((acc, message) => acc + message.content, '');
+    logger.debug(`Total message length: ${totalMessageContent.split(' ').length}, words`);
+
+    const result = await streamText({
+      messages,
+      env: context.cloudflare.env,
+      options,
+      apiKeys,
+      files,
+      providerSettings,
+      promptId,
+      contextOptimization,
+      isPromptCachingEnabled,
+    });
+
+    (async () => {
+      for await (const part of result.fullStream) {
+        if (part.type === 'error') {
+          const error: any = part.error;
+          logger.error(`${error}`);
+
+          return;
+        }
+      }
+    })();
+
+    stream.switchSource(result.toDataStream());
+
+    // return createrespo
+    return new Response(stream.readable, {
+      status: 200,
+      headers: {
+        'Content-Type': 'text/event-stream; charset=utf-8',
+        Connection: 'keep-alive',
+        'Cache-Control': 'no-cache',
+        'Text-Encoding': 'chunked',
+      },
+    });
+  } catch (error: any) {
+    logger.error(error);
+
+    if (error.message?.includes('API key')) {
+      throw new Response('Invalid or missing API key', {
+        status: 401,
+        statusText: 'Unauthorized',
+      });
+    }
+
+    throw new Response(null, {
+      status: 500,
+      statusText: 'Internal Server Error',
+    });
+  }
+}
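The chat route now returns the `SwitchableStream` body directly as a `text/event-stream` response. For orientation, here is a minimal, hypothetical sketch of reading that stream from a browser with plain `fetch`; the app's own client code is not shown in this diff, so the function name and reader loop are illustrative only.

```ts
// Hypothetical client-side sketch for consuming the streaming response returned
// by /api/chat above; the function name and reader loop are illustrative.
async function readChatStream(body: unknown): Promise<string> {
  const response = await fetch('/api/chat', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify(body),
  });

  if (!response.ok || !response.body) {
    throw new Error(`Chat request failed with status ${response.status}`);
  }

  const reader = response.body.getReader();
  const decoder = new TextDecoder();
  let text = '';

  // The server keeps the event stream open while SwitchableStream swaps sources,
  // so we simply read chunks until the stream closes.
  while (true) {
    const { done, value } = await reader.read();

    if (done) {
      break;
    }

    text += decoder.decode(value, { stream: true });
  }

  return text;
}
```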
app/routes/api.check-env-key.ts
ADDED
@@ -0,0 +1,16 @@
+import type { LoaderFunction } from '@remix-run/cloudflare';
+import { providerBaseUrlEnvKeys } from '~/utils/constants';
+
+export const loader: LoaderFunction = async ({ context, request }) => {
+  const url = new URL(request.url);
+  const provider = url.searchParams.get('provider');
+
+  if (!provider || !providerBaseUrlEnvKeys[provider].apiTokenKey) {
+    return Response.json({ isSet: false });
+  }
+
+  const envVarName = providerBaseUrlEnvKeys[provider].apiTokenKey;
+  const isSet = !!(process.env[envVarName] || (context?.cloudflare?.env as Record<string, any>)?.[envVarName]);
+
+  return Response.json({ isSet });
+};
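The new route answers whether a provider's API key is configured on the server. A minimal client-side sketch of calling it follows; the helper name and response type alias are ours, not part of the commit, but the `{ isSet: boolean }` shape matches the loader above.

```ts
// Illustrative client call against the new /api/check-env-key loader.
// The helper name and type alias are assumptions; the response shape is from the loader above.
type CheckEnvKeyResponse = { isSet: boolean };

async function isProviderKeySetOnServer(provider: string): Promise<boolean> {
  const res = await fetch(`/api/check-env-key?provider=${encodeURIComponent(provider)}`);

  if (!res.ok) {
    return false;
  }

  const data = (await res.json()) as CheckEnvKeyResponse;

  return data.isSet;
}

// Example: isProviderKeySetOnServer('Anthropic').then(console.log);
```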
app/routes/api.enhancer.ts
CHANGED
@@ -1,34 +1,13 @@
 import { type ActionFunctionArgs } from '@remix-run/cloudflare';
-
-//import { StreamingTextResponse, parseStreamPart } from 'ai';
 import { streamText } from '~/lib/.server/llm/stream-text';
 import { stripIndents } from '~/utils/stripIndent';
-import type {
+import type { ProviderInfo } from '~/types/model';
+import { getApiKeysFromCookie, getProviderSettingsFromCookie } from '~/lib/api/cookies';

 export async function action(args: ActionFunctionArgs) {
   return enhancerAction(args);
 }

-function parseCookies(cookieHeader: string) {
-  const cookies: any = {};
-
-  // Split the cookie string by semicolons and spaces
-  const items = cookieHeader.split(';').map((cookie) => cookie.trim());
-
-  items.forEach((item) => {
-    const [name, ...rest] = item.split('=');
-
-    if (name && rest) {
-      // Decode the name and value, and join value parts in case it contains '='
-      const decodedName = decodeURIComponent(name.trim());
-      const decodedValue = decodeURIComponent(rest.join('=').trim());
-      cookies[decodedName] = decodedValue;
-    }
-  });
-
-  return cookies;
-}
-
 async function enhancerAction({ context, request }: ActionFunctionArgs) {
   const { message, model, provider } = await request.json<{
     message: string;
@@ -55,12 +34,8 @@ async function enhancerAction({ context, request }: ActionFunctionArgs) {
   }

   const cookieHeader = request.headers.get('Cookie');
-
-  const apiKeys = JSON.parse(parseCookies(cookieHeader || '').apiKeys || '{}');
-  const providerSettings: Record<string, IProviderSetting> = JSON.parse(
-    parseCookies(cookieHeader || '').providers || '{}',
-  );
+  const apiKeys = getApiKeysFromCookie(cookieHeader);
+  const providerSettings = getProviderSettingsFromCookie(cookieHeader);

   try {
     const result = await streamText({
@@ -107,7 +82,10 @@ async function enhancerAction({ context, request }: ActionFunctionArgs) {
     return new Response(result.textStream, {
       status: 200,
       headers: {
-        'Content-Type': 'text/
+        'Content-Type': 'text/event-stream',
+        Connection: 'keep-alive',
+        'Cache-Control': 'no-cache',
+        'Text-Encoding': 'chunked',
       },
     });
   } catch (error: unknown) {
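The routes above drop their local `parseCookies` helpers in favour of `getApiKeysFromCookie` and `getProviderSettingsFromCookie` from `~/lib/api/cookies`. That file's contents are not shown in this view; based only on the call sites and the logic being removed here, a minimal sketch might look like the following. The implementation details are an assumption, not the commit's actual code.

```ts
// Hypothetical sketch of ~/lib/api/cookies.ts, inferred from how the routes call it;
// the actual implementation added by this commit is not shown in this diff.
import type { IProviderSetting } from '~/types/model';

function parseCookies(cookieHeader: string | null): Record<string, string> {
  const cookies: Record<string, string> = {};

  if (!cookieHeader) {
    return cookies;
  }

  cookieHeader.split(';').forEach((cookie) => {
    const [name, ...rest] = cookie.trim().split('=');

    if (name && rest.length > 0) {
      cookies[decodeURIComponent(name.trim())] = decodeURIComponent(rest.join('=').trim());
    }
  });

  return cookies;
}

export function getApiKeysFromCookie(cookieHeader: string | null): Record<string, string> {
  return JSON.parse(parseCookies(cookieHeader).apiKeys || '{}');
}

export function getProviderSettingsFromCookie(cookieHeader: string | null): Record<string, IProviderSetting> {
  return JSON.parse(parseCookies(cookieHeader).providers || '{}');
}
```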
app/routes/api.git-proxy.$.ts
CHANGED
@@ -1,65 +1,65 @@
(every line is removed and re-added with no change to its text; the file reads:)
import { json } from '@remix-run/cloudflare';
import type { ActionFunctionArgs, LoaderFunctionArgs } from '@remix-run/cloudflare';

// Handle all HTTP methods
export async function action({ request, params }: ActionFunctionArgs) {
  return handleProxyRequest(request, params['*']);
}

export async function loader({ request, params }: LoaderFunctionArgs) {
  return handleProxyRequest(request, params['*']);
}

async function handleProxyRequest(request: Request, path: string | undefined) {
  try {
    if (!path) {
      return json({ error: 'Invalid proxy URL format' }, { status: 400 });
    }

    const url = new URL(request.url);

    // Reconstruct the target URL
    const targetURL = `https://${path}${url.search}`;

    // Forward the request to the target URL
    const response = await fetch(targetURL, {
      method: request.method,
      headers: {
        ...Object.fromEntries(request.headers),

        // Override host header with the target host
        host: new URL(targetURL).host,
      },
      body: ['GET', 'HEAD'].includes(request.method) ? null : await request.arrayBuffer(),
    });

    // Create response with CORS headers
    const corsHeaders = {
      'Access-Control-Allow-Origin': '*',
      'Access-Control-Allow-Methods': 'GET, POST, PUT, DELETE, OPTIONS',
      'Access-Control-Allow-Headers': '*',
    };

    // Handle preflight requests
    if (request.method === 'OPTIONS') {
      return new Response(null, {
        headers: corsHeaders,
        status: 204,
      });
    }

    // Forward the response with CORS headers
    const responseHeaders = new Headers(response.headers);
    Object.entries(corsHeaders).forEach(([key, value]) => {
      responseHeaders.set(key, value);
    });

    return new Response(response.body, {
      status: response.status,
      headers: responseHeaders,
    });
  } catch (error) {
    console.error('Git proxy error:', error);
    return json({ error: 'Proxy error' }, { status: 500 });
  }
}
app/routes/api.llmcall.ts
CHANGED
@@ -1,34 +1,24 @@
 import { type ActionFunctionArgs } from '@remix-run/cloudflare';
-
-//import { StreamingTextResponse, parseStreamPart } from 'ai';
 import { streamText } from '~/lib/.server/llm/stream-text';
 import type { IProviderSetting, ProviderInfo } from '~/types/model';
 import { generateText } from 'ai';
-import {
+import { PROVIDER_LIST } from '~/utils/constants';
 import { MAX_TOKENS } from '~/lib/.server/llm/constants';
+import { LLMManager } from '~/lib/modules/llm/manager';
+import type { ModelInfo } from '~/lib/modules/llm/types';
+import { getApiKeysFromCookie, getProviderSettingsFromCookie } from '~/lib/api/cookies';

 export async function action(args: ActionFunctionArgs) {
   return llmCallAction(args);
 }

-function
-    const [name, ...rest] = item.split('=');
-
-    if (name && rest) {
-      // Decode the name and value, and join value parts in case it contains '='
-      const decodedName = decodeURIComponent(name.trim());
-      const decodedValue = decodeURIComponent(rest.join('=').trim());
-      cookies[decodedName] = decodedValue;
-    }
-  });
-
-  return cookies;
+async function getModelList(options: {
+  apiKeys?: Record<string, string>;
+  providerSettings?: Record<string, IProviderSetting>;
+  serverEnv?: Record<string, string>;
+}) {
+  const llmManager = LLMManager.getInstance(import.meta.env);
+  return llmManager.updateModelList(options);
 }

 async function llmCallAction({ context, request }: ActionFunctionArgs) {
@@ -58,12 +48,8 @@ async function llmCallAction({ context, request }: ActionFunctionArgs) {
   }

   const cookieHeader = request.headers.get('Cookie');
-
-  const apiKeys = JSON.parse(parseCookies(cookieHeader || '').apiKeys || '{}');
-  const providerSettings: Record<string, IProviderSetting> = JSON.parse(
-    parseCookies(cookieHeader || '').providers || '{}',
-  );
+  const apiKeys = getApiKeysFromCookie(cookieHeader);
+  const providerSettings = getProviderSettingsFromCookie(cookieHeader);

   if (streamOutput) {
     try {
@@ -105,8 +91,8 @@ async function llmCallAction({ context, request }: ActionFunctionArgs) {
     }
   } else {
     try {
-      const
-      const modelDetails =
+      const models = await getModelList({ apiKeys, providerSettings, serverEnv: context.cloudflare.env as any });
+      const modelDetails = models.find((m: ModelInfo) => m.name === model);

       if (!modelDetails) {
         throw new Error('Model not found');
app/routes/api.models.$provider.ts
ADDED
@@ -0,0 +1,2 @@
+import { loader } from './api.models';
+export { loader };
app/routes/api.models.ts
CHANGED
@@ -1,6 +1,84 @@
 import { json } from '@remix-run/cloudflare';
-import {
+import { LLMManager } from '~/lib/modules/llm/manager';
+import type { ModelInfo } from '~/lib/modules/llm/types';
+import type { ProviderInfo } from '~/types/model';
+import { getApiKeysFromCookie, getProviderSettingsFromCookie } from '~/lib/api/cookies';

+interface ModelsResponse {
+  modelList: ModelInfo[];
+  providers: ProviderInfo[];
+  defaultProvider: ProviderInfo;
+}
+
+let cachedProviders: ProviderInfo[] | null = null;
+let cachedDefaultProvider: ProviderInfo | null = null;
+
+function getProviderInfo(llmManager: LLMManager) {
+  if (!cachedProviders) {
+    cachedProviders = llmManager.getAllProviders().map((provider) => ({
+      name: provider.name,
+      staticModels: provider.staticModels,
+      getApiKeyLink: provider.getApiKeyLink,
+      labelForGetApiKey: provider.labelForGetApiKey,
+      icon: provider.icon,
+    }));
+  }
+
+  if (!cachedDefaultProvider) {
+    const defaultProvider = llmManager.getDefaultProvider();
+    cachedDefaultProvider = {
+      name: defaultProvider.name,
+      staticModels: defaultProvider.staticModels,
+      getApiKeyLink: defaultProvider.getApiKeyLink,
+      labelForGetApiKey: defaultProvider.labelForGetApiKey,
+      icon: defaultProvider.icon,
+    };
+  }
+
+  return { providers: cachedProviders, defaultProvider: cachedDefaultProvider };
+}
+
+export async function loader({
+  request,
+  params,
+}: {
+  request: Request;
+  params: { provider?: string };
+}): Promise<Response> {
+  const llmManager = LLMManager.getInstance(import.meta.env);
+
+  // Get client side maintained API keys and provider settings from cookies
+  const cookieHeader = request.headers.get('Cookie');
+  const apiKeys = getApiKeysFromCookie(cookieHeader);
+  const providerSettings = getProviderSettingsFromCookie(cookieHeader);
+
+  const { providers, defaultProvider } = getProviderInfo(llmManager);
+
+  let modelList: ModelInfo[] = [];
+
+  if (params.provider) {
+    // Only update models for the specific provider
+    const provider = llmManager.getProvider(params.provider);
+
+    if (provider) {
+      const staticModels = provider.staticModels;
+      const dynamicModels = provider.getDynamicModels
+        ? await provider.getDynamicModels(apiKeys, providerSettings, import.meta.env)
+        : [];
+      modelList = [...staticModels, ...dynamicModels];
+    }
+  } else {
+    // Update all models
+    modelList = await llmManager.updateModelList({
+      apiKeys,
+      providerSettings,
+      serverEnv: import.meta.env,
+    });
+  }
+
+  return json<ModelsResponse>({
+    modelList,
+    providers,
+    defaultProvider,
+  });
 }
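Together with the `api.models.$provider.ts` re-export above, the loader can now refresh either every provider or just one. A quick illustration of how a client might consume it follows; the helper name is ours, while the response shape mirrors `ModelsResponse` from the loader above.

```ts
// Illustrative client call against the reworked /api/models route.
// The helper name is an assumption; the response shape comes from ModelsResponse above.
import type { ModelInfo } from '~/lib/modules/llm/types';
import type { ProviderInfo } from '~/types/model';

interface ModelsResponse {
  modelList: ModelInfo[];
  providers: ProviderInfo[];
  defaultProvider: ProviderInfo;
}

async function fetchModels(provider?: string): Promise<ModelsResponse> {
  // /api/models returns models for every provider; /api/models/:provider limits the
  // dynamic-model refresh to a single provider (see api.models.$provider.ts above).
  const url = provider ? `/api/models/${encodeURIComponent(provider)}` : '/api/models';
  const response = await fetch(url);

  return (await response.json()) as ModelsResponse;
}
```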
app/routes/webcontainer.preview.$id.tsx
ADDED
@@ -0,0 +1,92 @@
+import { json, type LoaderFunctionArgs } from '@remix-run/cloudflare';
+import { useLoaderData } from '@remix-run/react';
+import { useCallback, useEffect, useRef, useState } from 'react';
+
+const PREVIEW_CHANNEL = 'preview-updates';
+
+export async function loader({ params }: LoaderFunctionArgs) {
+  const previewId = params.id;
+
+  if (!previewId) {
+    throw new Response('Preview ID is required', { status: 400 });
+  }
+
+  return json({ previewId });
+}
+
+export default function WebContainerPreview() {
+  const { previewId } = useLoaderData<typeof loader>();
+  const iframeRef = useRef<HTMLIFrameElement>(null);
+  const broadcastChannelRef = useRef<BroadcastChannel>();
+  const [previewUrl, setPreviewUrl] = useState('');
+
+  // Handle preview refresh
+  const handleRefresh = useCallback(() => {
+    if (iframeRef.current && previewUrl) {
+      // Force a clean reload
+      iframeRef.current.src = '';
+      requestAnimationFrame(() => {
+        if (iframeRef.current) {
+          iframeRef.current.src = previewUrl;
+        }
+      });
+    }
+  }, [previewUrl]);
+
+  // Notify other tabs that this preview is ready
+  const notifyPreviewReady = useCallback(() => {
+    if (broadcastChannelRef.current && previewUrl) {
+      broadcastChannelRef.current.postMessage({
+        type: 'preview-ready',
+        previewId,
+        url: previewUrl,
+        timestamp: Date.now(),
+      });
+    }
+  }, [previewId, previewUrl]);
+
+  useEffect(() => {
+    // Initialize broadcast channel
+    broadcastChannelRef.current = new BroadcastChannel(PREVIEW_CHANNEL);
+
+    // Listen for preview updates
+    broadcastChannelRef.current.onmessage = (event) => {
+      if (event.data.previewId === previewId) {
+        if (event.data.type === 'refresh-preview' || event.data.type === 'file-change') {
+          handleRefresh();
+        }
+      }
+    };
+
+    // Construct the WebContainer preview URL
+    const url = `https://${previewId}.local-credentialless.webcontainer-api.io`;
+    setPreviewUrl(url);
+
+    // Set the iframe src
+    if (iframeRef.current) {
+      iframeRef.current.src = url;
+    }
+
+    // Notify other tabs that this preview is ready
+    notifyPreviewReady();
+
+    // Cleanup
+    return () => {
+      broadcastChannelRef.current?.close();
+    };
+  }, [previewId, handleRefresh, notifyPreviewReady]);
+
+  return (
+    <div className="w-full h-full">
+      <iframe
+        ref={iframeRef}
+        title="WebContainer Preview"
+        className="w-full h-full border-none"
+        sandbox="allow-scripts allow-forms allow-popups allow-modals allow-storage-access-by-user-activation allow-same-origin"
+        allow="cross-origin-isolated"
+        loading="eager"
+        onLoad={notifyPreviewReady}
+      />
+    </div>
+  );
+}
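This route listens for `refresh-preview` and `file-change` messages on the `preview-updates` channel and announces itself with `preview-ready`. A minimal sketch of how another tab could trigger a reload is shown below; the function name is illustrative, and the message fields mirror what the route's `onmessage` handler checks for.

```ts
// Illustrative sender side for the 'preview-updates' BroadcastChannel used above.
const PREVIEW_CHANNEL = 'preview-updates';

function requestPreviewRefresh(previewId: string) {
  const channel = new BroadcastChannel(PREVIEW_CHANNEL);

  // Matches the { type, previewId } fields the preview route inspects.
  channel.postMessage({
    type: 'refresh-preview',
    previewId,
    timestamp: Date.now(),
  });

  channel.close();
}
```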
app/utils/constants.ts
CHANGED
@@ -1,7 +1,4 @@
-import type { IProviderSetting } from '~/types/model';
-
 import { LLMManager } from '~/lib/modules/llm/manager';
-import type { ModelInfo } from '~/lib/modules/llm/types';
 import type { Template } from '~/types/template';

 export const WORK_DIR_NAME = 'project';
@@ -17,9 +14,7 @@ const llmManager = LLMManager.getInstance(import.meta.env);
 export const PROVIDER_LIST = llmManager.getAllProviders();
 export const DEFAULT_PROVIDER = llmManager.getDefaultProvider();

-
-
-const providerBaseUrlEnvKeys: Record<string, { baseUrlKey?: string; apiTokenKey?: string }> = {};
+export const providerBaseUrlEnvKeys: Record<string, { baseUrlKey?: string; apiTokenKey?: string }> = {};
 PROVIDER_LIST.forEach((provider) => {
   providerBaseUrlEnvKeys[provider.name] = {
     baseUrlKey: provider.config.baseUrlKey,
@@ -27,34 +22,6 @@ PROVIDER_LIST.forEach((provider) => {
   };
 });

-// Export the getModelList function using the manager
-export async function getModelList(options: {
-  apiKeys?: Record<string, string>;
-  providerSettings?: Record<string, IProviderSetting>;
-  serverEnv?: Record<string, string>;
-}) {
-  return await llmManager.updateModelList(options);
-}
-
-async function initializeModelList(options: {
-  env?: Record<string, string>;
-  providerSettings?: Record<string, IProviderSetting>;
-  apiKeys?: Record<string, string>;
-}): Promise<ModelInfo[]> {
-  const { providerSettings, apiKeys, env } = options;
-  const list = await getModelList({
-    apiKeys,
-    providerSettings,
-    serverEnv: env,
-  });
-  MODEL_LIST = list || MODEL_LIST;
-
-  return list;
-}
-
-// initializeModelList({})
-export { initializeModelList, providerBaseUrlEnvKeys, MODEL_LIST };
-
 // starter Templates

 export const STARTER_TEMPLATES: Template[] = [
docs/.gitignore
CHANGED
@@ -1,2 +1,3 @@
 .venv
 site/
+.python-version
docs/docs/CONTRIBUTING.md
CHANGED
@@ -144,7 +144,7 @@ docker build . --target bolt-ai-development

 **Option 3: Docker Compose Profile**
 ```bash
-docker
+docker compose --profile development up
 ```

 #### Running the Development Container
@@ -171,7 +171,7 @@ docker build . --target bolt-ai-production

 **Option 3: Docker Compose Profile**
 ```bash
-docker
+docker compose --profile production up
 ```

 #### Running the Production Container
docs/docs/FAQ.md
CHANGED
@@ -1,91 +1,95 @@
 # Frequently Asked Questions (FAQ)

-<summary><strong>What are the best models for bolt.diy?</strong></summary>
+## Models and Setup
+
+??? question "What are the best models for bolt.diy?"
+    For the best experience with bolt.diy, we recommend using the following models:

     - **Claude 3.5 Sonnet (old)**: Best overall coder, providing excellent results across all use cases
     - **Gemini 2.0 Flash**: Exceptional speed while maintaining good performance
     - **GPT-4o**: Strong alternative to Claude 3.5 Sonnet with comparable capabilities
-    - **DeepSeekCoder
-    - **
+    - **DeepSeekCoder V3**: Best open source model (available through OpenRouter, DeepSeek API, or self-hosted)
+    - **DeepSeekCoder V2 236b**: available through OpenRouter, DeepSeek API, or self-hosted
+    - **Qwen 2.5 Coder 32b**: Best model for self-hosting with reasonable hardware requirements

+    !!! warning
+        Models with less than 7b parameters typically lack the capability to properly interact with bolt!

-<summary><strong>How do I get the best results with bolt.diy?</strong></summary>
+## Best Practices
+
+??? question "How do I get the best results with bolt.diy?"
+    - **Be specific about your stack**:
+      Mention the frameworks or libraries you want to use (e.g., Astro, Tailwind, ShadCN) in your initial prompt. This ensures that bolt.diy scaffolds the project according to your preferences.

     - **Use the enhance prompt icon**:
+      Before sending your prompt, click the *enhance* icon to let the AI refine your prompt. You can edit the suggested improvements before submitting.

     - **Scaffold the basics first, then add features**:
+      Ensure the foundational structure of your application is in place before introducing advanced functionality. This helps bolt.diy establish a solid base to build on.

     - **Batch simple instructions**:
+      Combine simple tasks into a single prompt to save time and reduce API credit consumption. For example:
+      *"Change the color scheme, add mobile responsiveness, and restart the dev server."*
-</details>

-<summary><strong>How do I contribute to bolt.diy?</strong></summary>
+## Project Information
+
+??? question "How do I contribute to bolt.diy?"
+    Check out our [Contribution Guide](CONTRIBUTING.md) for more details on how to get involved!

+??? question "What are the future plans for bolt.diy?"
+    Visit our [Roadmap](https://roadmap.sh/r/ottodev-roadmap-2ovzo) for the latest updates.
+    New features and improvements are on the way!
-</details>

-<summary><strong>Why are there so many open issues/pull requests?</strong></summary>
+??? question "Why are there so many open issues/pull requests?"
+    bolt.diy began as a small showcase project on @ColeMedin's YouTube channel to explore editing open-source projects with local LLMs. However, it quickly grew into a massive community effort!
+
+    We're forming a team of maintainers to manage demand and streamline issue resolution. The maintainers are rockstars, and we're also exploring partnerships to help the project thrive.

+## Model Comparisons

-<summary><strong>How do local LLMs compare to larger models like Claude 3.5 Sonnet for bolt.diy?</strong></summary>
+??? question "How do local LLMs compare to larger models like Claude 3.5 Sonnet for bolt.diy?"
+    While local LLMs are improving rapidly, larger models like GPT-4o, Claude 3.5 Sonnet, and DeepSeek Coder V2 236b still offer the best results for complex applications. Our ongoing focus is to improve prompts, agents, and the platform to better support smaller local LLMs.

-<summary><strong>Common Errors and Troubleshooting</strong></summary>
+## Troubleshooting

+??? error "There was an error processing this request"
     This generic error message means something went wrong. Check both:
+
     - The terminal (if you started the app with Docker or `pnpm`).
     - The developer console in your browser (press `F12` or right-click > *Inspect*, then go to the *Console* tab).

+??? error "x-api-key header missing"
     This error is sometimes resolved by restarting the Docker container.
     If that doesn't work, try switching from Docker to `pnpm` or vice versa. We're actively investigating this issue.

+??? error "Blank preview when running the app"
     A blank preview often occurs due to hallucinated bad code or incorrect commands.
     To troubleshoot:
+
     - Check the developer console for errors.
     - Remember, previews are core functionality, so the app isn't broken! We're working on making these errors more transparent.

+??? error "Everything works, but the results are bad"
-    Local LLMs like Qwen-2.5-Coder are powerful for small applications but still experimental for larger projects. For better results, consider using larger models like GPT-4o, Claude 3.5 Sonnet, or DeepSeek Coder V2 236b.
+    Local LLMs like Qwen-2.5-Coder are powerful for small applications but still experimental for larger projects. For better results, consider using larger models like
+
+    - GPT-4o
+    - Claude 3.5 Sonnet
+    - DeepSeek Coder V2 236b

+??? error "Received structured exception #0xc0000005: access violation"
     If you are getting this, you are probably on Windows. The fix is generally to update the [Visual C++ Redistributable](https://learn.microsoft.com/en-us/cpp/windows/latest-supported-vc-redist?view=msvc-170)

+??? error "Miniflare or Wrangler errors in Windows"
+    You will need to make sure you have the latest version of Visual Studio C++ installed (14.40.33816), more information here <a href="https://github.com/stackblitz-labs/bolt.diy/issues/19">Github Issues</a>

 ---

+## Get Help & Support
+
+!!! tip "Community Support"
+    [Join the bolt.diy Community](https://thinktank.ottomator.ai/c/bolt-diy/17){target=_blank} for discussions and help
+
+!!! bug "Report Issues"
+    [Open an Issue](https://github.com/stackblitz-labs/bolt.diy/issues/19){target=_blank} in our GitHub Repository
docs/docs/index.md
CHANGED
@@ -25,6 +25,8 @@ bolt.diy allows you to choose the LLM that you use for each prompt! Currently, y

 [Join the community!](https://thinktank.ottomator.ai)

+Also [this pinned post in our community](https://thinktank.ottomator.ai/t/videos-tutorial-helpful-content/3243) has a bunch of incredible resources for running and deploying bolt.diy yourself!
+
 ---

 ## Features
@@ -154,7 +156,7 @@ Once you've configured your keys, the application will be ready to use the selec
 2. **Run the Container**:
    Use Docker Compose profiles to manage environments:
    ```bash
-   docker
+   docker compose --profile development up
    ```

    - With the development profile, changes to your code will automatically reflect in the running container (hot reloading).
@@ -186,7 +188,7 @@ To keep your local version of bolt.diy up to date with the latest changes, follo

 - **If using Docker**, ensure you rebuild the Docker image to avoid using a cached version:
   ```bash
-  docker
+  docker compose --profile development up --build
   ```

 - **If not using Docker**, you can start the application as usual with:
docs/mkdocs.yml
CHANGED
@@ -65,4 +65,12 @@ markdown_extensions:
   - pymdownx.details
   - pymdownx.superfences
   - pymdownx.mark
   - attr_list
+  - md_in_html
+  - tables
+  - def_list
+  - admonition
+  - pymdownx.tasklist:
+      custom_checkbox: true
+  - toc:
+      permalink: true
messages.json
CHANGED
The diff for this file is too large to render. See raw diff.
package.json
CHANGED
@@ -1,138 +1,139 @@
+{
+  "name": "bolt",
+  "description": "An AI Agent",
+  "private": true,
+  "license": "MIT",
+  "sideEffects": false,
+  "type": "module",
+  "version": "0.0.5",
+  "scripts": {
+    "deploy": "npm run build && wrangler pages deploy",
+    "build": "remix vite:build",
+    "dev": "node pre-start.cjs && remix vite:dev",
+    "test": "vitest --run",
+    "test:watch": "vitest",
+    "lint": "eslint --cache --cache-location ./node_modules/.cache/eslint app",
+    "lint:fix": "npm run lint -- --fix && prettier app --write",
+    "start:windows": "wrangler pages dev ./build/client",
+    "start:unix": "bindings=$(./bindings.sh) && wrangler pages dev ./build/client $bindings",
+    "start": "node -e \"const { spawn } = require('child_process'); const isWindows = process.platform === 'win32'; const cmd = isWindows ? 'npm run start:windows' : 'npm run start:unix'; const child = spawn(cmd, { shell: true, stdio: 'inherit' }); child.on('exit', code => process.exit(code));\"",
+    "dockerstart": "bindings=$(./bindings.sh) && wrangler pages dev ./build/client $bindings --ip 0.0.0.0 --port 5173 --no-show-interactive-dev-session",
+    "dockerrun": "docker run -it -d --name bolt-ai-live -p 5173:5173 --env-file .env.local bolt-ai",
+    "dockerbuild:prod": "docker build -t bolt-ai:production -t bolt-ai:latest --target bolt-ai-production .",
+    "dockerbuild": "docker build -t bolt-ai:development -t bolt-ai:latest --target bolt-ai-development .",
+    "typecheck": "tsc",
+    "typegen": "wrangler types",
+    "preview": "pnpm run build && pnpm run start",
+    "prepare": "husky"
+  },
+  "engines": {
+    "node": ">=18.18.0"
+  },
+  "dependencies": {
+    "@ai-sdk/amazon-bedrock": "1.0.6",
+    "@ai-sdk/anthropic": "^1.0.6",
+    "@ai-sdk/cohere": "^1.0.3",
+    "@ai-sdk/google": "^0.0.52",
+    "@ai-sdk/mistral": "^0.0.43",
+    "@ai-sdk/openai": "^0.0.66",
+    "@codemirror/autocomplete": "^6.18.3",
+    "@codemirror/commands": "^6.7.1",
+    "@codemirror/lang-cpp": "^6.0.2",
+    "@codemirror/lang-css": "^6.3.1",
+    "@codemirror/lang-html": "^6.4.9",
+    "@codemirror/lang-javascript": "^6.2.2",
+    "@codemirror/lang-json": "^6.0.1",
+    "@codemirror/lang-markdown": "^6.3.1",
+    "@codemirror/lang-python": "^6.1.6",
+    "@codemirror/lang-sass": "^6.0.2",
+    "@codemirror/lang-vue": "^0.1.3",
+    "@codemirror/lang-wast": "^6.0.2",
+    "@codemirror/language": "^6.10.6",
+    "@codemirror/search": "^6.5.8",
+    "@codemirror/state": "^6.4.1",
+    "@codemirror/view": "^6.35.0",
+    "@iconify-json/ph": "^1.2.1",
+    "@iconify-json/svg-spinners": "^1.2.1",
+    "@lezer/highlight": "^1.2.1",
+    "@nanostores/react": "^0.7.3",
+    "@octokit/rest": "^21.0.2",
+    "@octokit/types": "^13.6.2",
+    "@openrouter/ai-sdk-provider": "^0.0.5",
+    "@radix-ui/react-context-menu": "^2.2.2",
+    "@radix-ui/react-dialog": "^1.1.2",
+    "@radix-ui/react-dropdown-menu": "^2.1.2",
+    "@radix-ui/react-separator": "^1.1.0",
+    "@radix-ui/react-switch": "^1.1.1",
+    "@radix-ui/react-tooltip": "^1.1.4",
+    "@remix-run/cloudflare": "^2.15.0",
+    "@remix-run/cloudflare-pages": "^2.15.0",
+    "@remix-run/react": "^2.15.0",
+    "@uiw/codemirror-theme-vscode": "^4.23.6",
+    "@unocss/reset": "^0.61.9",
+    "@webcontainer/api": "1.3.0-internal.10",
+    "@xterm/addon-fit": "^0.10.0",
+    "@xterm/addon-web-links": "^0.11.0",
+    "@xterm/xterm": "^5.5.0",
+    "ai": "^4.0.22",
+    "chalk": "^5.4.1",
+    "date-fns": "^3.6.0",
+    "diff": "^5.2.0",
+    "dotenv": "^16.4.7",
+    "file-saver": "^2.0.5",
+    "framer-motion": "^11.12.0",
+    "ignore": "^6.0.2",
+    "isbot": "^4.4.0",
+    "isomorphic-git": "^1.27.2",
+    "istextorbinary": "^9.5.0",
+    "jose": "^5.9.6",
+    "js-cookie": "^3.0.5",
+    "jszip": "^3.10.1",
+    "nanostores": "^0.10.3",
+    "ollama-ai-provider": "^0.15.2",
+    "react": "^18.3.1",
+    "react-dom": "^18.3.1",
+    "react-hotkeys-hook": "^4.6.1",
+    "react-markdown": "^9.0.1",
+    "react-resizable-panels": "^2.1.7",
+    "react-toastify": "^10.0.6",
+    "rehype-raw": "^7.0.0",
+    "rehype-sanitize": "^6.0.0",
+    "remark-gfm": "^4.0.0",
+    "remix-island": "^0.2.0",
+    "remix-utils": "^7.7.0",
+    "shiki": "^1.24.0",
+    "unist-util-visit": "^5.0.0"
+  },
+  "devDependencies": {
+    "@blitz/eslint-plugin": "0.1.0",
+    "@cloudflare/workers-types": "^4.20241127.0",
+    "@remix-run/dev": "^2.15.0",
+    "@types/diff": "^5.2.3",
+    "@types/dom-speech-recognition": "^0.0.4",
+    "@types/file-saver": "^2.0.7",
+    "@types/js-cookie": "^3.0.6",
+    "@types/react": "^18.3.12",
+    "@types/react-dom": "^18.3.1",
+    "fast-glob": "^3.3.2",
+    "husky": "9.1.7",
+    "is-ci": "^3.0.1",
+    "node-fetch": "^3.3.2",
+    "pnpm": "^9.14.4",
+    "prettier": "^3.4.1",
+    "sass-embedded": "^1.81.0",
+    "typescript": "^5.7.2",
+    "unified": "^11.0.5",
+    "unocss": "^0.61.9",
+    "vite": "^5.4.11",
+    "vite-plugin-node-polyfills": "^0.22.0",
+    "vite-plugin-optimize-css-modules": "^1.1.0",
+    "vite-tsconfig-paths": "^4.3.2",
+    "vitest": "^2.1.7",
+    "wrangler": "^3.91.0",
+    "zod": "^3.23.8"
+  },
+  "resolutions": {
+    "@typescript-eslint/utils": "^8.0.0-alpha.30"
+  },
+  "packageManager": "[email protected]"
+}
pnpm-lock.yaml
CHANGED
The diff for this file is too large to render. See raw diff.
public/icons/AmazonBedrock.svg
ADDED
vite.config.ts
CHANGED
@@ -4,9 +4,11 @@ import { defineConfig, type ViteDevServer } from 'vite';
 import { nodePolyfills } from 'vite-plugin-node-polyfills';
 import { optimizeCssModules } from 'vite-plugin-optimize-css-modules';
 import tsconfigPaths from 'vite-tsconfig-paths';
-
+import * as dotenv from 'dotenv';
 import { execSync } from 'child_process';

+dotenv.config();
+
 // Get git hash with fallback
 const getGitHash = () => {
   try {
@@ -17,18 +19,21 @@ const getGitHash = () => {
 };


+
+
 export default defineConfig((config) => {
   return {
     define: {
       __COMMIT_HASH: JSON.stringify(getGitHash()),
       __APP_VERSION: JSON.stringify(process.env.npm_package_version),
+      // 'process.env': JSON.stringify(process.env)
     },
     build: {
       target: 'esnext',
     },
     plugins: [
       nodePolyfills({
-        include: ['path', 'buffer'],
+        include: ['path', 'buffer', 'process'],
       }),
       config.mode !== 'test' && remixCloudflareDevProxy(),
       remixVitePlugin({
|