diff --git a/.github/workflows/dockerToHub-dev.yml b/.github/workflows/dockerToHub-dev.yml
index 02507d9b5..fd056a7e5 100644
--- a/.github/workflows/dockerToHub-dev.yml
+++ b/.github/workflows/dockerToHub-dev.yml
@@ -8,8 +8,8 @@ on:
 jobs:
   build:
     name: build test image to aly
-    runs-on: thinkpad
-    # runs-on: ubuntu-latest
+    # runs-on: thinkpad
+    runs-on: ubuntu-latest
     # runs-on: self-hosted
     steps:
       - name: Check out the repo
diff --git a/.github/workflows/dockerToHub.yml b/.github/workflows/dockerToHub.yml
index c02788454..4a9d531f3 100644
--- a/.github/workflows/dockerToHub.yml
+++ b/.github/workflows/dockerToHub.yml
@@ -16,8 +16,8 @@ jobs:
   build:
     name: build image to aly
     # runs-on: "103.200"
-    runs-on: thinkpad
-    # runs-on: ubuntu-latest
+    # runs-on: thinkpad
+    runs-on: ubuntu-latest
     # runs-on: self-hosted
     steps:
       - name: Check out the repo
diff --git a/app/client/platforms/openai.ts b/app/client/platforms/openai.ts
index f77f08dbf..be67a079f 100644
--- a/app/client/platforms/openai.ts
+++ b/app/client/platforms/openai.ts
@@ -118,7 +118,7 @@ export class ChatGPTApi implements LLMApi {
         enumerable: true,
         configurable: true,
         writable: true,
-        value: Math.max(modelConfig.max_tokens, 4096),
+        value: modelConfig.max_tokens,
       });
     }
 
diff --git a/app/utils.ts b/app/utils.ts
index 33b8eccd2..0436a128a 100644
--- a/app/utils.ts
+++ b/app/utils.ts
@@ -292,8 +292,8 @@ export function getMessageImages(message: RequestMessage): string[] {
 
 export function isVisionModel(model: string) {
   return (
-    model.startsWith("gpt-4-vision") ||
-    model.startsWith("gemini-pro-vision") ||
-    !DEFAULT_MODELS.find((m) => m.name == model)
+    // model.startsWith("gpt-4-vision") ||
+    // model.startsWith("gemini-pro-vision") ||
+    model.includes("vision")
   );
 }