Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

DX-1248: Fix examples and exports #78

Closed
wants to merge 13 commits
54 changes: 0 additions & 54 deletions .eslintrc.json

This file was deleted.

2 changes: 1 addition & 1 deletion .husky/pre-commit
Original file line number Diff line number Diff line change
@@ -1 +1 @@
bun run lint && bun run fmt
bun run lint-staged && bun run check-types
Binary file modified bun.lockb
Binary file not shown.
117 changes: 117 additions & 0 deletions eslint.config.mjs
Original file line number Diff line number Diff line change
@@ -0,0 +1,117 @@
import typescriptEslint from "@typescript-eslint/eslint-plugin";
import unicorn from "eslint-plugin-unicorn";
import path from "node:path";
import { fileURLToPath } from "node:url";
import js from "@eslint/js";
import { FlatCompat } from "@eslint/eslintrc";

// FlatCompat lets legacy "extends"-style shareable configs be consumed from
// the new ESLint flat-config format.
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);
const compat = new FlatCompat({
  baseDirectory: __dirname,
  recommendedConfig: js.configs.recommended,
  allConfig: js.configs.all,
});

export default [
  {
    // Tooling configs and the example apps are not part of the library's
    // type-checked lint project.
    ignores: ["**/*.config.*", "examples/**/*"],
  },
  ...compat.extends(
    "eslint:recommended",
    "plugin:unicorn/recommended",
    "plugin:@typescript-eslint/strict-type-checked",
    "plugin:@typescript-eslint/stylistic-type-checked"
  ),
  {
    plugins: {
      "@typescript-eslint": typescriptEslint,
      unicorn,
    },

    languageOptions: {
      globals: {},
      // FIX: this codebase is ESM TypeScript — the type-checked presets above
      // and rules such as consistent-type-imports assume import/export syntax,
      // which "ecmaVersion: 5" / "script" mode predates. Parse as modern ESM.
      ecmaVersion: "latest",
      sourceType: "module",

      parserOptions: {
        // Required by the *-type-checked presets: enables typed linting.
        project: "./tsconfig.json",
      },
    },

    rules: {
      // Library code should not write to stdout; warn/error are allowed for
      // diagnostics.
      "no-console": [
        "error",
        {
          allow: ["warn", "error"],
        },
      ],

      "@typescript-eslint/no-magic-numbers": [
        "error",
        {
          ignore: [-1, 0, 1, 100],
          ignoreArrayIndexes: true,
        },
      ],

      "@typescript-eslint/unbound-method": "off",
      "@typescript-eslint/prefer-as-const": "error",
      "@typescript-eslint/consistent-type-imports": "error",
      "@typescript-eslint/restrict-template-expressions": "off",
      "@typescript-eslint/consistent-type-definitions": ["error", "type"],

      // Underscore prefix opts a binding out of the unused check.
      "@typescript-eslint/no-unused-vars": [
        "error",
        {
          varsIgnorePattern: "^_",
          argsIgnorePattern: "^_",
        },
      ],

      "@typescript-eslint/prefer-ts-expect-error": "off",

      "@typescript-eslint/no-misused-promises": [
        "error",
        {
          checksVoidReturn: false,
        },
      ],

      // FIX: use the string severity for consistency with every other rule in
      // this file (was the numeric form `2`).
      "unicorn/prevent-abbreviations": [
        "error",
        {
          replacements: {
            args: false,
            props: false,
            db: false,
          },
        },
      ],

      "no-implicit-coercion": [
        "error",
        {
          boolean: true,
        },
      ],

      "no-extra-boolean-cast": [
        "error",
        {
          enforceForLogicalOperands: true,
        },
      ],

      "no-unneeded-ternary": [
        "error",
        {
          defaultAssignment: true,
        },
      ],

      "unicorn/no-array-reduce": ["off"],
      "unicorn/no-nested-ternary": "off",
    },
  },
];
1 change: 1 addition & 0 deletions examples/cloudflare-workers/.gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@
dist/

# dev
.dev.vars
.yarn/
!.yarn/releases
.vscode/*
Expand Down
10 changes: 2 additions & 8 deletions examples/cloudflare-workers/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@ The project includes four endpoints:
- `/chat-stream` to make a chat request with rag-chat using Upstash LLM with streaming.
- `/chat-stream-openai` to make a chat request with rag-chat using OpenAI LLM with streaming.

You can check out the `src/router.ts` file to see how each endpoint works.
You can check out the `src/index.ts` file to see how each endpoint works.

For running the app locally, first run `npm install` to install the packages. Then, see the `Set Environment Variables` and `Development` sections below.

Expand All @@ -28,13 +28,7 @@ npm install @upstash/rag-chat
Ensure your wrangler.toml file includes the following configuration to enable Node.js compatibility:

```toml
node_compat = true
```

In older CF worker versions, you may need to set the following compatibility flags:

```toml
compatibility_flags = [ "streams_enable_constructors", "transformstream_enable_standard_constructor" ]
compatibility_flags = ["nodejs_compat_v2"]
```

### 3. Set Environment Variables
Expand Down
4 changes: 2 additions & 2 deletions examples/cloudflare-workers/package.json
Original file line number Diff line number Diff line change
@@ -1,12 +1,12 @@
{
"name": "cloudflare-workers",
"name": "cloudflare-workers-example",
"scripts": {
"dev": "wrangler dev src/index.ts",
"deploy": "wrangler deploy --minify src/index.ts"
},
"dependencies": {
"@langchain/openai": "^0.1.3",
"@upstash/rag-chat": "^1.0.3",
"@upstash/rag-chat": "latest",
"hono": "^4.5.1"
},
"devDependencies": {
Expand Down
146 changes: 138 additions & 8 deletions examples/cloudflare-workers/src/index.ts
Original file line number Diff line number Diff line change
@@ -1,8 +1,138 @@
import type { Env } from "hono/types";
import app from "./router";

export default {
async fetch(request: Request, environment: Env, context: ExecutionContext): Promise<Response> {
return app.fetch(request, environment, context);
},
};
import { Hono } from "hono";
import { Index } from "@upstash/vector";
import { RAGChat, upstash } from "@upstash/rag-chat";

// Hono app typed with the per-request variables we set in middleware and the
// Worker bindings (secrets/vars) the handlers read from `c.env`.
// NOTE(review): UPSTASH_REDIS_REST_URL/TOKEN and OPENAI_API_KEY are declared
// here but not used by any visible handler — confirm whether rag-chat picks
// them up implicitly or they can be dropped.
const app = new Hono<{
  Variables: {
    ragChat: RAGChat;
  };
  Bindings: {
    UPSTASH_REDIS_REST_URL: string;
    UPSTASH_REDIS_REST_TOKEN: string;
    UPSTASH_VECTOR_REST_URL: string;
    UPSTASH_VECTOR_REST_TOKEN: string;
    QSTASH_TOKEN: string;
    OPENAI_API_KEY: string;
  };
}>();

// Build a RAGChat client per request and stash it on the context. In Workers,
// env bindings are only available inside a request, so construction cannot be
// hoisted to module scope.
app.use("*", async (c, next) => {
  const ragChat = new RAGChat({
    // Upstash-hosted Llama 3 model, authenticated with the QStash token.
    model: upstash("meta-llama/Meta-Llama-3-8B-Instruct", { apiKey: c.env.QSTASH_TOKEN }),
    vector: new Index({
      url: c.env.UPSTASH_VECTOR_REST_URL,
      token: c.env.UPSTASH_VECTOR_REST_TOKEN,
      cache: false,
    }),
  });

  c.set("ragChat", ragChat);

  await next();
});

// GET / — serves a minimal HTML index linking to every demo endpoint.
app.get("/", (c) => {
  return c.html(`
<html>
<body>
<h1>Available Endpoints</h1>
<ul>
<li><a href="/add-data">Add Data</a></li>
<li><a href="/chat">Chat</a></li>
<li><a href="/chat-stream">Chat Stream (Upstash)</a></li>
<li><a href="/chat-stream-openai">Chat Stream (Open AI)</a></li>
</ul>
</body>
</html>
`);
});

// GET /add-data — seeds the vector store with a handful of text facts about
// Paris, inserting them concurrently, then echoes the insertion results.
app.get("/add-data", async (c) => {
  const { ragChat } = c.var;

  const facts = [
    "Paris, the capital of France, is renowned for its iconic landmark, the Eiffel Tower, which was completed in 1889 and stands at 330 meters tall.",
    "The city is home to numerous world-class museums, including the Louvre Museum, housing famous works such as the Mona Lisa and Venus de Milo.",
    "Paris is often called the City of Light due to its significant role during the Age of Enlightenment and its early adoption of street lighting.",
    "The Seine River gracefully flows through Paris, dividing the city into the Left Bank and the Right Bank, each offering its own distinct atmosphere.",
    "Paris boasts a rich culinary scene, with a plethora of bistros, cafés, and Michelin-starred restaurants serving exquisite French cuisine.",
  ];

  // One context.add per fact; Promise.all preserves input order in `result`.
  const result = await Promise.all(
    facts.map((data) => ragChat.context.add({ type: "text", data }))
  );

  return c.text(`Added data. Result: ${JSON.stringify(result)}`, 200);
});

// GET /chat — one-shot (non-streaming) completion over the stored context.
app.get("/chat", async (c) => {
  const { output } = await c.var.ragChat.chat("What is paris called?", { streaming: false });
  return c.text(output, 200);
});

// GET /chat-stream — streaming completion: the model emits a stream of string
// chunks which we re-encode to UTF-8 bytes and hand to the Response body.
app.get("/chat-stream", async (c) => {
  const response = await c.var.ragChat.chat(
    "Describe what Paris is known as, narrating in the style of Dostoyevsky, and provide the answer in approximately a thousand words.",
    { streaming: true }
  );

  // string -> Uint8Array bridge between the model output and the HTTP body.
  const encoder = new TextEncoder();
  const { readable, writable } = new TransformStream<string>({
    transform(chunk, controller) {
      controller.enqueue(encoder.encode(chunk));
    },
  });

  // Deliberately not awaited: the pump keeps running while the Response below
  // is already being streamed to the client.
  void response.output.pipeTo(writable);

  return new Response(readable, {
    status: 200,
    headers: {
      "Content-Type": "text/plain; charset=utf-8",
      "Cache-Control": "no-cache, no-transform",
    },
  });
});

// GET /chat-stream-openai — streaming chat endpoint.
// NOTE(review): this handler is byte-identical to /chat-stream and uses the
// RAGChat instance configured in middleware (Upstash Llama model) — despite
// the route name and the README, nothing here selects an OpenAI model or
// reads OPENAI_API_KEY. Confirm whether an OpenAI-configured RAGChat was
// intended for this route.
app.get("/chat-stream-openai", async (c) => {
  const response = await c.var.ragChat.chat(
    "Describe what Paris is known as, narrating in the style of Dostoyevsky, and provide the answer in approximately a thousand words.",
    { streaming: true }
  );

  // Re-encode the model's string chunks into bytes for the HTTP body.
  const textEncoder = new TextEncoder();
  const { readable, writable } = new TransformStream<string>({
    transform(chunk, controller) {
      controller.enqueue(textEncoder.encode(chunk));
    },
  });

  // Start pumping the body. NOTE: No await!
  void response.output.pipeTo(writable);

  // ... and deliver our Response while that’s running.
  return new Response(readable, {
    status: 200,
    headers: {
      "Content-Type": "text/plain; charset=utf-8",
      "Cache-Control": "no-cache, no-transform",
    },
  });
});

// Module-worker entrypoint: a Hono app exposes a Workers-compatible `fetch`.
export default app;
Loading
Loading