Skip to content

Commit

Permalink
feat: add organization parameter
Browse files Browse the repository at this point in the history
  • Loading branch information
CahidArda committed Sep 25, 2024
1 parent dc0dd2c commit 28a5ae8
Show file tree
Hide file tree
Showing 4 changed files with 44 additions and 8 deletions.
9 changes: 5 additions & 4 deletions .github/workflows/tests.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -10,10 +10,11 @@ on:
env:
UPSTASH_VECTOR_REST_URL: ${{ secrets.UPSTASH_VECTOR_REST_URL }}
UPSTASH_VECTOR_REST_TOKEN: ${{ secrets.UPSTASH_VECTOR_REST_TOKEN }}
UPSTASH_REDIS_REST_URL: ${{secrets.UPSTASH_REDIS_REST_URL}}
UPSTASH_REDIS_REST_TOKEN: ${{secrets.UPSTASH_REDIS_REST_TOKEN}}
OPENAI_API_KEY: ${{secrets.OPENAI_API_KEY}}
QSTASH_TOKEN: ${{secrets.QSTASH_TOKEN}}
UPSTASH_REDIS_REST_URL: ${{ secrets.UPSTASH_REDIS_REST_URL }}
UPSTASH_REDIS_REST_TOKEN: ${{ secrets.UPSTASH_REDIS_REST_TOKEN }}
OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
OPENAI_ORGANIZATION: ${{ secrets.OPENAI_ORGANIZATION }}
QSTASH_TOKEN: ${{ secrets.QSTASH_TOKEN }}

jobs:
test:
Expand Down
3 changes: 2 additions & 1 deletion src/config.ts
Original file line number Diff line number Diff line change
Expand Up @@ -62,11 +62,12 @@ const initializeRedis = () => {
const initializeModel = () => {
const qstashToken = process.env.QSTASH_TOKEN;
const openAIToken = process.env.OPENAI_API_KEY;
const organization = process.env.OPENAI_ORGANIZATION;

if (qstashToken) return upstash("meta-llama/Meta-Llama-3-8B-Instruct", { apiKey: qstashToken });

if (openAIToken) {
return openai("gpt-4o", { apiKey: openAIToken });
return openai("gpt-4o", { apiKey: openAIToken, organization });
}

throw new Error(
Expand Down
10 changes: 8 additions & 2 deletions src/models.ts
Original file line number Diff line number Diff line change
Expand Up @@ -60,6 +60,7 @@ export type LLMClientConfig = {
logprobs?: boolean;
topLogprobs?: number;
openAIApiKey?: string;
organization?: string;
apiKey?: string;
baseUrl: string;
};
Expand Down Expand Up @@ -170,6 +171,7 @@ const setupAnalytics = (

const createLLMClient = (model: string, options: ModelOptions, provider?: Providers) => {
const apiKey = options.apiKey ?? process.env.OPENAI_API_KEY ?? "";
const organization = options.organization ?? process.env.OPENAI_ORGANIZATION ?? "";
const providerBaseUrl = options.baseUrl;
if (!apiKey) {
throw new Error(
Expand All @@ -189,11 +191,15 @@ const createLLMClient = (model: string, options: ModelOptions, provider?: Provid
configuration: {
baseURL: analyticsSetup.baseURL ?? providerBaseUrl,
...(analyticsSetup.defaultHeaders && { defaultHeaders: analyticsSetup.defaultHeaders }),
organization,
},
});
};

export const upstash = (model: UpstashChatModel, options?: Omit<ModelOptions, "baseUrl">) => {
export const upstash = (
model: UpstashChatModel,
options?: Omit<ModelOptions, "baseUrl" | "organization">
) => {
const apiKey = options?.apiKey ?? process.env.QSTASH_TOKEN ?? "";
if (!apiKey) {
throw new Error(
Expand All @@ -208,7 +214,7 @@ export const upstash = (model: UpstashChatModel, options?: Omit<ModelOptions, "b
);
};

export const custom = (model: string, options: ModelOptions) => {
/**
 * Creates an LLM client pointed at a caller-supplied ("custom") provider.
 *
 * The `organization` option is excluded here: it only applies to the OpenAI
 * provider, so custom endpoints cannot set it.
 *
 * @param model - Model identifier understood by the custom endpoint.
 * @param options - Client options; `baseUrl` is mandatory for this provider.
 * @throws Error when `baseUrl` is missing, since there is no default endpoint.
 */
export const custom = (model: string, options: Omit<ModelOptions, "organization">) => {
  const { baseUrl } = options;
  // Guard first: a custom provider is meaningless without an endpoint.
  if (!baseUrl) {
    throw new Error("baseUrl cannot be empty or undefined.");
  }
  return createLLMClient(model, options, "custom");
};
Expand Down
30 changes: 29 additions & 1 deletion src/rag-chat.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -107,6 +107,13 @@ describe("RAG Chat with ratelimit", () => {
verbose: false,
temperature: 0,
apiKey: process.env.OPENAI_API_KEY,
configuration: {
// If the OPENAI_ORGANIZATION env var is unset, the request could succeed
// without exercising the organization parameter. Falling back to a
// deliberately invalid value ("wrong-key") forces a failure in that case,
// so the test cannot pass silently when the organization is not configured.
// eslint-disable-next-line @typescript-eslint/prefer-nullish-coalescing
organization: process.env.OPENAI_ORGANIZATION || "wrong-key",
},
}),
vector,
redis,
Expand All @@ -124,7 +131,7 @@ describe("RAG Chat with ratelimit", () => {
await vector.deleteNamespace(namespace);
});

test(
test.only(
"should throw ratelimit error",
async () => {
let remainingLimit = -9;
Expand Down Expand Up @@ -176,6 +183,9 @@ describe("RAG Chat with custom template", () => {
verbose: false,
temperature: 0,
apiKey: process.env.OPENAI_API_KEY,
configuration: {
organization: process.env.OPENAI_ORGANIZATION,
},
}),
});

Expand Down Expand Up @@ -225,6 +235,9 @@ describe("RAG Chat addContext using PDF", () => {
verbose: false,
temperature: 0,
apiKey: process.env.OPENAI_API_KEY,
configuration: {
organization: process.env.OPENAI_ORGANIZATION,
},
}),
});

Expand Down Expand Up @@ -267,6 +280,9 @@ describe("RAG Chat without Redis, but In-memory chat history", () => {
verbose: false,
temperature: 0,
apiKey: process.env.OPENAI_API_KEY,
configuration: {
organization: process.env.OPENAI_ORGANIZATION,
},
}),
vector,
namespace,
Expand Down Expand Up @@ -324,6 +340,9 @@ describe("RAG Chat addContext using CSV", () => {
verbose: false,
temperature: 0,
apiKey: process.env.OPENAI_API_KEY,
configuration: {
organization: process.env.OPENAI_ORGANIZATION,
},
}),
});

Expand Down Expand Up @@ -366,6 +385,9 @@ describe("RAG Chat addContext using text-file", () => {
verbose: false,
temperature: 0,
apiKey: process.env.OPENAI_API_KEY,
configuration: {
organization: process.env.OPENAI_ORGANIZATION,
},
}),
});

Expand Down Expand Up @@ -412,6 +434,9 @@ describe("RAG Chat addContext using HTML", () => {
verbose: false,
temperature: 0,
apiKey: process.env.OPENAI_API_KEY,
configuration: {
organization: process.env.OPENAI_ORGANIZATION,
},
}),
});

Expand Down Expand Up @@ -793,6 +818,9 @@ describe("RAG Chat with disableHistory option", () => {
verbose: false,
temperature: 0,
apiKey: process.env.OPENAI_API_KEY,
configuration: {
organization: process.env.OPENAI_ORGANIZATION,
},
}),
vector,
redis,
Expand Down

0 comments on commit 28a5ae8

Please sign in to comment.