Commit e382cdd

merge-main

2 parents ebe9c5c + 3939b5b

112 files changed, +2676 -1456 lines

Diff for: README.md

+3-3
@@ -44,11 +44,11 @@
 
 ![edit](docs/static/img/edit.gif)
 
-## Actions
+## Agent
 
-[Actions](https://continue.dev/docs/actions/how-to-use-it) are shortcuts for common use cases
+[Agent](https://continue.dev/docs/agent/how-to-use-it) enables you to make more substantial changes to your codebase
 
-![actions](docs/static/img/actions.gif)
+![agent](docs/static/img/agent.gif)
 
 </div>
 

Diff for: binary/package-lock.json

+2-1
Some generated files are not rendered by default.

Diff for: core/autocomplete/context/root-path-context/RootPathContextService.ts

-2
@@ -88,7 +88,6 @@ export class RootPathContextService {
       default:
       // const type = node.type;
       // console.log(getSyntaxTreeString(node));
-      // debugger;
 
         query = await getQueryForFile(
           filepath,
@@ -165,7 +164,6 @@ export class RootPathContextService {
     )) {
       const key = RootPathContextService.keyFromNode(parentKey, astNode);
       // const type = astNode.type;
-      // debugger;
 
       const foundInCache = this.cache.get(key);
       const newSnippets =

Diff for: core/config/yaml/loadYaml.ts

+4-4
@@ -127,7 +127,7 @@ async function configYamlToContinueConfig(
     models: [],
     tools: [...allTools],
     mcpServerStatuses: [],
-    systemMessage: config.rules?.join("\n"),
+    systemMessage: undefined,
     experimental: {
       modelContextProtocolServers: config.mcpServers?.map((mcpServer) => ({
         transport: {
@@ -211,15 +211,15 @@
   for (const model of config.models ?? []) {
     model.roles = model.roles ?? modelsArrayRoles; // Default to all 4 chat-esque roles if not specified
     try {
-      const llms = await llmsFromModelConfig(
+      const llms = await llmsFromModelConfig({
         model,
         ide,
         uniqueId,
         ideSettings,
         writeLog,
         platformConfigMetadata,
-        continueConfig.systemMessage,
-      );
+        config: continueConfig,
+      });
 
       if (modelsArrayRoles.some((role) => model.roles?.includes(role))) {
         continueConfig.models.push(...llms);
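
The change above replaces the positional argument list of `llmsFromModelConfig` with a single options object and stops joining `config.rules` into `systemMessage` at config-load time; the whole `ContinueConfig` is handed to the model layer instead. As a general pattern, collapsing a long positional list into a named options object keeps call sites self-describing and lets a new field (here `config`) be threaded through without breaking every caller. A minimal, self-contained illustration of the pattern with hypothetical names (not code from this repo):

```ts
// Hypothetical illustration of the positional-to-options-object refactor.
interface CreateClientOptions {
  host: string;
  port: number;
  timeoutMs?: number; // optional fields can be added later without touching existing callers
}

function createClient({ host, port, timeoutMs = 5000 }: CreateClientOptions): string {
  return `${host}:${port} (timeout ${timeoutMs}ms)`;
}

// Call sites name every argument, so reordering or extending the options is safe.
const client = createClient({ host: "localhost", port: 8080 });
console.log(client);
```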

Diff for: core/config/yaml/models.ts

+73-48
@@ -1,6 +1,6 @@
 import { ModelConfig } from "@continuedev/config-yaml";
 
-import { IDE, IdeSettings, LLMOptions } from "../..";
+import { ContinueConfig, IDE, IdeSettings, LLMOptions } from "../..";
 import { BaseLLM } from "../../llm";
 import { LLMClasses } from "../../llm/llms";
 import { PlatformConfigMetadata } from "../profile/PlatformProfileLoader";
@@ -13,22 +13,29 @@ function getModelClass(
   return LLMClasses.find((llm) => llm.providerName === model.provider);
 }
 
-function getContinueProxyModelName(
-  ownerSlug: string,
-  packageSlug: string,
-  model: ModelConfig,
-): string {
-  return `${ownerSlug}/${packageSlug}/${model.provider}/${model.model}`;
-}
-
-async function modelConfigToBaseLLM(
-  model: ModelConfig,
-  uniqueId: string,
-  ideSettings: IdeSettings,
-  writeLog: (log: string) => Promise<void>,
-  platformConfigMetadata: PlatformConfigMetadata | undefined,
-  systemMessage: string | undefined,
-): Promise<BaseLLM | undefined> {
+// function getContinueProxyModelName(
+//   ownerSlug: string,
+//   packageSlug: string,
+//   model: ModelConfig,
+// ): string {
+//   return `${ownerSlug}/${packageSlug}/${model.provider}/${model.model}`;
+// }
+
+async function modelConfigToBaseLLM({
+  model,
+  uniqueId,
+  ideSettings,
+  writeLog,
+  platformConfigMetadata,
+  config,
+}: {
+  model: ModelConfig;
+  uniqueId: string;
+  ideSettings: IdeSettings;
+  writeLog: (log: string) => Promise<void>;
+  platformConfigMetadata: PlatformConfigMetadata | undefined;
+  config: ContinueConfig;
+}): Promise<BaseLLM | undefined> {
   const cls = getModelClass(model);
 
   if (!cls) {
@@ -50,7 +57,8 @@ async function modelConfigToBaseLLM(
     writeLog,
     uniqueId,
     title: model.name,
-    systemMessage,
+    systemMessage: config.systemMessage,
+    rules: config.rules,
     promptTemplates: model.promptTemplates,
     capabilities: {
       tools: model.capabilities?.includes("tool_use"),
@@ -127,16 +135,25 @@ async function modelConfigToBaseLLM(
   return llm;
 }
 
-async function autodetectModels(
-  llm: BaseLLM,
-  model: ModelConfig,
-  ide: IDE,
-  uniqueId: string,
-  ideSettings: IdeSettings,
-  writeLog: (log: string) => Promise<void>,
-  platformConfigMetadata: PlatformConfigMetadata | undefined,
-  systemMessage: string | undefined,
-): Promise<BaseLLM[]> {
+async function autodetectModels({
+  llm,
+  model,
+  ide,
+  uniqueId,
+  ideSettings,
+  writeLog,
+  platformConfigMetadata,
+  config,
+}: {
+  llm: BaseLLM;
+  model: ModelConfig;
+  ide: IDE;
+  uniqueId: string;
+  ideSettings: IdeSettings;
+  writeLog: (log: string) => Promise<void>;
+  platformConfigMetadata: PlatformConfigMetadata | undefined;
+  config: ContinueConfig;
+}): Promise<BaseLLM[]> {
   try {
     const modelNames = await llm.listModels();
     const detectedModels = await Promise.all(
@@ -146,8 +163,8 @@ async function autodetectModels(
           return undefined;
         }
 
-        return await modelConfigToBaseLLM(
-          {
+        return await modelConfigToBaseLLM({
+          model: {
             ...model,
             model: modelName,
             name: modelName,
@@ -156,8 +173,8 @@
           ideSettings,
           writeLog,
           platformConfigMetadata,
-          systemMessage,
-        );
+          config,
+        });
       }),
     );
     return detectedModels.filter((x) => typeof x !== "undefined") as BaseLLM[];
@@ -167,38 +184,46 @@ async function autodetectModels(
   }
 }
 
-export async function llmsFromModelConfig(
-  model: ModelConfig,
-  ide: IDE,
-  uniqueId: string,
-  ideSettings: IdeSettings,
-  writeLog: (log: string) => Promise<void>,
-  platformConfigMetadata: PlatformConfigMetadata | undefined,
-  systemMessage: string | undefined,
-): Promise<BaseLLM[]> {
-  const baseLlm = await modelConfigToBaseLLM(
+export async function llmsFromModelConfig({
+  model,
+  ide,
+  uniqueId,
+  ideSettings,
+  writeLog,
+  platformConfigMetadata,
+  config,
+}: {
+  model: ModelConfig;
+  ide: IDE;
+  uniqueId: string;
+  ideSettings: IdeSettings;
+  writeLog: (log: string) => Promise<void>;
+  platformConfigMetadata: PlatformConfigMetadata | undefined;
+  config: ContinueConfig;
+}): Promise<BaseLLM[]> {
+  const baseLlm = await modelConfigToBaseLLM({
     model,
     uniqueId,
     ideSettings,
     writeLog,
     platformConfigMetadata,
-    systemMessage,
-  );
+    config,
+  });
   if (!baseLlm) {
     return [];
   }
 
   if (model.model === AUTODETECT) {
-    const models = await autodetectModels(
-      baseLlm,
+    const models = await autodetectModels({
+      llm: baseLlm,
       model,
       ide,
       uniqueId,
       ideSettings,
       writeLog,
       platformConfigMetadata,
-      systemMessage,
-    );
+      config,
+    });
     return models;
   } else {
     return [baseLlm];
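
With this refactor the LLM options receive `systemMessage: config.systemMessage` and `rules: config.rules` as separate fields instead of a single pre-joined string (previously `config.rules?.join("\n")` was folded into `systemMessage` in loadYaml.ts). How the rules are ultimately merged with the system message inside `BaseLLM` is not shown in this diff; the sketch below is one hedged way a consumer could combine them, with an assumed `Rule` shape rather than the real type from `@continuedev/config-yaml`:

```ts
// Hedged sketch: one way rules could be folded into a system prompt at request
// time. The Rule shape here is an assumption for illustration only; the real
// type is exported by @continuedev/config-yaml and is not shown in this diff.
type Rule = string | { name?: string; rule: string };

function buildSystemMessage(
  systemMessage: string | undefined,
  rules: Rule[] | undefined,
): string | undefined {
  const ruleTexts = (rules ?? []).map((r) => (typeof r === "string" ? r : r.rule));
  const parts = [systemMessage, ...ruleTexts].filter(
    (p): p is string => !!p && p.trim().length > 0,
  );
  return parts.length > 0 ? parts.join("\n\n") : undefined;
}

// Example: a base prompt plus two rules becomes one newline-separated message.
console.log(
  buildSystemMessage("You are a helpful assistant.", [
    "Prefer TypeScript.",
    { rule: "Keep answers short." },
  ]),
);
```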

Diff for: core/index.d.ts

+6-3
@@ -1,6 +1,7 @@
-import { DataDestination, ModelRole } from "@continuedev/config-yaml";
+import { DataDestination, ModelRole, Rule } from "@continuedev/config-yaml";
 import Parser from "web-tree-sitter";
 import { GetGhTokenArgs } from "./protocol/ide";
+
 declare global {
   interface Window {
     ide?: "vscode";
@@ -497,6 +498,7 @@ export interface LLMOptions {
   templateMessages?: (messages: ChatMessage[]) => string;
   writeLog?: (str: string) => Promise<void>;
   llmRequestHook?: (model: string, prompt: string) => any;
+  rules?: Rule[];
   apiKey?: string;
 
   // continueProperties
@@ -1143,6 +1145,7 @@ export interface ApplyState {
   numDiffs?: number;
   filepath?: string;
   fileContent?: string;
+  toolCallId?: string;
 }
 
 export interface RangeInFileWithContents {
@@ -1314,7 +1317,7 @@ export interface ContinueConfig {
   docs?: SiteIndexingConfig[];
   tools: Tool[];
   mcpServerStatuses: MCPServerStatus[];
-  rules?: string[];
+  rules?: Rule[];
   modelsByRole: Record<ModelRole, ILLM[]>;
   selectedModelByRole: Record<ModelRole, ILLM | null>;
   data?: DataDestination[];
@@ -1337,7 +1340,7 @@ export interface BrowserSerializedContinueConfig {
   docs?: SiteIndexingConfig[];
   tools: Tool[];
   mcpServerStatuses: MCPServerStatus[];
-  rules?: string[];
+  rules?: Rule[];
   usePlatform: boolean;
   tabAutocompleteOptions?: Partial<TabAutocompleteOptions>;
   modelsByRole: Record<ModelRole, ModelDescription[]>;
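
Besides widening `rules` from `string[]` to `Rule[]`, this diff adds an optional `toolCallId` to `ApplyState`, which suggests apply updates can now be correlated with the tool call that produced them. Below is a hedged sketch of how a listener might group updates by that id; `ApplyStateLike` is a local stand-in that echoes only the fields visible in this diff, and the handler wiring is an assumption, not the extension's actual API:

```ts
// Hedged sketch: grouping apply-state updates by the tool call that produced
// them, keyed on the new optional toolCallId field.
interface ApplyStateLike {
  numDiffs?: number;
  filepath?: string;
  fileContent?: string;
  toolCallId?: string; // newly added in this commit
}

const applyStatesByToolCall = new Map<string, ApplyStateLike[]>();

function onApplyStateUpdate(state: ApplyStateLike): void {
  // Updates that were not driven by a tool call are simply not grouped here.
  if (!state.toolCallId) return;
  const existing = applyStatesByToolCall.get(state.toolCallId) ?? [];
  applyStatesByToolCall.set(state.toolCallId, [...existing, state]);
}

// Hypothetical usage: two updates from the same tool call land in one bucket.
onApplyStateUpdate({ numDiffs: 1, filepath: "src/app.ts", toolCallId: "call_123" });
onApplyStateUpdate({ numDiffs: 2, filepath: "src/app.ts", toolCallId: "call_123" });
console.log(applyStatesByToolCall.get("call_123")?.length); // 2
```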

Diff for: core/llm/countTokens.test.ts

+1
@@ -1,3 +1,4 @@
+// @ts-nocheck
 // Generated by continue
 import { ChatMessage, MessagePart } from "../index.js";
 import {
