Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Telegram bot #80

Merged
merged 5 commits into from
Jun 25, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
16 changes: 15 additions & 1 deletion apps/cf-ai-backend/src/helper.ts
Original file line number Diff line number Diff line change
Expand Up @@ -106,6 +106,20 @@ export async function deleteDocument({
}
}

/**
 * Makes a raw string safe to use as a vector-metadata key.
 *
 * Replaces the characters `.`, `$` and `"` — invalid in metadata keys —
 * with `_`. Callers use this for the `user-<id>` flag keys written into
 * vector metadata, so the same transformation must be applied anywhere
 * those keys are used in query filters.
 *
 * @param key - the raw key; must be non-empty.
 * @returns the sanitized key.
 * @throws Error when `key` is empty.
 */
function sanitizeKey(key: string): string {
	if (!key) throw new Error("Key cannot be empty");

	// Remove or replace invalid characters. `$` is included in the character
	// class, so the result can never start with `$` — the previous extra
	// leading-`$` strip was unreachable dead code and has been removed.
	return key.replace(/[.$"]/g, "_");
}

export async function batchCreateChunksAndEmbeddings({
store,
body,
Expand Down Expand Up @@ -172,7 +186,7 @@ export async function batchCreateChunksAndEmbeddings({
type: body.type ?? "page",
content: newPageContent,

[`user-${body.user}`]: 1,
[sanitizeKey(`user-${body.user}`)]: 1,
...body.spaces?.reduce((acc, space) => {
acc[`space-${body.user}-${space}`] = 1;
return acc;
Expand Down
161 changes: 160 additions & 1 deletion apps/cf-ai-backend/src/index.ts
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
import { z } from "zod";
import { Hono } from "hono";
import { CoreMessage, streamText } from "ai";
import { CoreMessage, generateText, streamText, tool } from "ai";
import { chatObj, Env, vectorObj } from "./types";
import {
batchCreateChunksAndEmbeddings,
Expand All @@ -15,6 +15,7 @@ import { zValidator } from "@hono/zod-validator";
import chunkText from "./utils/chonker";
import { systemPrompt, template } from "./prompts/prompt1";
import { swaggerUI } from "@hono/swagger-ui";
import { createOpenAI } from "@ai-sdk/openai";

const app = new Hono<{ Bindings: Env }>();

Expand Down Expand Up @@ -169,6 +170,164 @@ app.get(
},
);

// This is a special endpoint for our "chatbot-only" solutions.
// It does both - adding content AND chatting with it: a cheap "router" model
// first classifies the query as either an "add" command or a "chat" question,
// then we either embed-and-store the content or answer with retrieval.
app.post(
	"/api/autoChatOrAdd",
	zValidator(
		"query",
		z.object({
			query: z.string(),
			user: z.string(),
		}),
	),
	zValidator("json", chatObj),
	async (c) => {
		const { query, user } = c.req.valid("query");
		const { chatHistory } = c.req.valid("json");

		const { store, model } = await initQuery(c);

		// we're creating another instance of the model here because we want to use a cheaper model for this.
		const openai = createOpenAI({
			apiKey: c.env.OPENAI_API_KEY,
		});

		let task: "add" | "chat" = "chat";
		let thingToAdd: "page" | "image" | "text" | undefined = undefined;
		let addContent: string | undefined = undefined;

		// This is a "router". this finds out if the user wants to add a document, or chat with the AI to get a response.
		// NOTE: the tool's `execute` mutates the closed-over locals above. TS
		// cannot see that mutation, which is why `task` must be widened with
		// `as string` in the comparison below.
		await generateText({
			model: openai.chat("gpt-3.5-turbo"),
			system: `You are Supermemory chatbot. You can either add a document to the supermemory database, or return a chat response. Based on this query,
			You must determine what to do. Basically if it feels like a "question", then you should intiate a chat. If it feels like a "command" or feels like something that could be forwarded to the AI, then you should add a document.
			You must also extract the "thing" to add and what type of thing it is.`,
			prompt: `Question from user: ${query}`,
			tools: {
				decideTask: tool({
					description:
						"Decide if the user wants to add a document or chat with the AI",
					parameters: z.object({
						generatedTask: z.enum(["add", "chat"]),
						contentToAdd: z.object({
							thing: z.enum(["page", "image", "text"]),
							content: z.string(),
						}),
					}),
					execute: async ({ generatedTask, contentToAdd }) => {
						task = generatedTask;
						thingToAdd = contentToAdd.thing;
						addContent = contentToAdd.content;
					},
				}),
			},
		});

		if ((task as string) === "add") {
			// The model may finish without ever calling the tool, leaving
			// `addContent` undefined. Fail with a clear 400 instead of crashing
			// on `.slice()` of undefined further down.
			if (!addContent) {
				return c.json(
					{
						status: "error",
						response: "I couldn't figure out what you wanted me to add.",
					},
					400,
				);
			}

			// addString is the plaintext string that the user wants to add to the database
			let addString: string = addContent;

			if (thingToAdd === "page") {
				// TODO: Sometimes this query hangs, and errors out. we need to do proper error management here.
				// The target URL is untrusted input: encode it so `&`/`#`/spaces
				// don't corrupt the markdown-service query string.
				const response = await fetch(
					"https://md.dhr.wtf/?url=" + encodeURIComponent(addContent),
					{
						headers: {
							Authorization: "Bearer " + c.env.SECURITY_KEY,
						},
					},
				);

				addString = await response.text();
			}

			// At this point, we can just go ahead and create the embeddings!
			await batchCreateChunksAndEmbeddings({
				store,
				body: {
					url: addContent,
					user,
					type: thingToAdd,
					pageContent: addString,
					title: `${addString.slice(0, 30)}... (Added from chatbot)`,
				},
				chunks: chunkText(addString, 1536),
				context: c,
			});

			return c.json({
				status: "ok",
				response:
					"I added the document to your personal second brain! You can now use it to answer questions or chat with me.",
				contentAdded: {
					type: thingToAdd,
					content: addString,
					url:
						thingToAdd === "page"
							? addContent
							: `https://supermemory.ai/note/${Date.now()}`,
				},
			});
		} else {
			// Metadata keys are sanitized on write (helper.ts replaces `.`, `$`
			// and `"` with `_`), so the query filter must apply the same
			// replacement or lookups silently miss for user ids containing
			// those characters.
			const filter: VectorizeVectorMetadataFilter = {
				[`user-${user}`.replace(/[.$"]/g, "_")]: 1,
			};

			const queryAsVector = await store.embeddings.embedQuery(query);

			const resp = await c.env.VECTORIZE_INDEX.query(queryAsVector, {
				topK: 5,
				filter,
				returnMetadata: true,
			});

			const minScore = Math.min(...resp.matches.map(({ score }) => score));
			const maxScore = Math.max(...resp.matches.map(({ score }) => score));

			// This entire chat part is basically just a dumb down version of the /api/chat endpoint.
			// Scores are rescaled to ~[1, 99]; when all scores are equal the
			// denominator is zero, so fall back to a neutral 50.
			const normalizedData = resp.matches.map((data) => ({
				...data,
				normalizedScore:
					maxScore !== minScore
						? 1 + ((data.score - minScore) / (maxScore - minScore)) * 98
						: 50,
			}));

			const preparedContext = normalizedData.map(
				({ metadata, score, normalizedScore }) => ({
					context: `Website title: ${metadata!.title}\nDescription: ${metadata!.description}\nURL: ${metadata!.url}\nContent: ${metadata!.text}`,
					score,
					normalizedScore,
				}),
			);

			const prompt = template({
				contexts: preparedContext,
				question: query,
			});

			const initialMessages: CoreMessage[] = [
				{
					role: "system",
					content: `You are an AI chatbot called "Supermemory.ai". When asked a question by a user, you must take all the context provided to you and give a good, small, but helpful response.`,
				},
				{ role: "assistant", content: "Hello, how can I help?" },
			];

			const userMessage: CoreMessage = { role: "user", content: prompt };

			const response = await generateText({
				model,
				messages: [
					...initialMessages,
					...((chatHistory || []) as CoreMessage[]),
					userMessage,
				],
			});

			return c.json({ status: "ok", response: response.text });
		}
	},
);

/* TODO: Eventually, we should not have to save each user's content in a separate vector.
Lowkey, it makes sense. The user may save their own version of a page - like selected text from twitter.com url.
But, it's not scalable *enough*. How can we store the same vectors for the same content, without needing to duplicate for each user?
Expand Down
14 changes: 12 additions & 2 deletions apps/web/app/(auth)/signin/page.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,17 @@ import { Google } from "@repo/ui/components/icons";

export const runtime = "edge";

async function Signin() {
async function Signin({
searchParams,
}: {
searchParams: Record<string, string | string[] | undefined>;
}) {
const searchParamsAsString = Object.keys(searchParams)
.map((key) => {
return `${key}=${searchParams[key]}`;
})
.join("&");

return (
<div className="flex items-center justify-between min-h-screen">
<div className="relative w-full lg:w-1/2 flex items-center justify-center lg:justify-start min-h-screen bg-secondary p-8">
Expand All @@ -30,7 +40,7 @@ async function Signin() {
action={async () => {
"use server";
await signIn("google", {
redirectTo: "/home?firstTime=true",
redirectTo: `/home?firstTime=true&${searchParamsAsString}`,
});
}}
>
Expand Down
4 changes: 4 additions & 0 deletions apps/web/app/(canvas)/canvas/[id]/page.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -9,11 +9,15 @@ import { AutocompleteIcon, blockIcon } from "@repo/ui/icons";
import Image from "next/image";
import { Switch } from "@repo/ui/shadcn/switch";
import { Label } from "@repo/ui/shadcn/label";
import { useRouter } from "next/router";

function page() {
const [fullScreen, setFullScreen] = useState(false);
const [visible, setVisible] = useState(true);

const router = useRouter();
router.push("/home");

return (
<div
className={`h-screen w-full ${!fullScreen ? "px-4 py-6" : "bg-[#1F2428]"} transition-all`}
Expand Down
11 changes: 6 additions & 5 deletions apps/web/app/(canvas)/canvas/page.tsx
Original file line number Diff line number Diff line change
@@ -1,9 +1,10 @@
import React from 'react'
import { useRouter } from "next/router";
import React from "react";

// Placeholder canvas index page that immediately redirects to /home.
function page() {
	const router = useRouter();

	// Navigation is a side effect: run it once after mount instead of calling
	// router.push() during render, which fires on every render and violates
	// React's render-purity rules.
	// NOTE(review): `useRouter` is imported from "next/router" (pages router);
	// this file lives under the app directory, where it should come from
	// "next/navigation" — confirm and fix the import.
	React.useEffect(() => {
		router.push("/home");
	}, [router]);

	return <div>page</div>;
}

export default page
export default page;
22 changes: 21 additions & 1 deletion apps/web/app/(dash)/home/page.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,8 @@ import QueryInput from "./queryinput";
import { homeSearchParamsCache } from "@/lib/searchParams";
import { getSpaces } from "@/app/actions/fetchers";
import { useRouter } from "next/navigation";
import { createChatThread } from "@/app/actions/doers";
import { createChatThread, linkTelegramToUser } from "@/app/actions/doers";
import { toast } from "sonner";

function Page({
searchParams,
Expand All @@ -14,11 +15,30 @@ function Page({
}) {
// TODO: use this to show a welcome page/modal
// const { firstTime } = homeSearchParamsCache.parse(searchParams);

const [telegramUser, setTelegramUser] = useState<string | undefined>(
searchParams.telegramUser as string,
);

const { push } = useRouter();

const [spaces, setSpaces] = useState<{ id: number; name: string }[]>([]);

useEffect(() => {
if (telegramUser) {
const linkTelegram = async () => {
const response = await linkTelegramToUser(telegramUser);

if (response.success) {
toast.success("Your telegram has been linked successfully.");
} else {
toast.error("Failed to link telegram. Please try again.");
}
};

linkTelegram();
}

getSpaces().then((res) => {
if (res.success && res.data) {
setSpaces(res.data);
Expand Down
2 changes: 1 addition & 1 deletion apps/web/app/(landing)/twitterLink.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -58,7 +58,7 @@ export function HoverBorderGradient({
if (!directions[nextIndex]) {
return directions[0]!;
}
return directions[nextIndex];
return directions[nextIndex]!;
};

const movingMap: Record<Direction, string> = {
Expand Down
31 changes: 31 additions & 0 deletions apps/web/app/actions/doers.ts
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@ import {
contentToSpace,
space,
storedContent,
users,
} from "../../server/db/schema";
import { ServerActionReturnType } from "./types";
import { auth } from "../../server/auth";
Expand All @@ -17,6 +18,7 @@ import { and, eq, inArray, sql } from "drizzle-orm";
import { LIMITS } from "@/lib/constants";
import { z } from "zod";
import { ChatHistory } from "@repo/shared-types";
import { decipher } from "@/server/encrypt";

export const createSpace = async (
input: string | FormData,
Expand Down Expand Up @@ -344,3 +346,32 @@ export const createChatObject = async (
data: true,
};
};

export const linkTelegramToUser = async (
telegramUser: string,
): ServerActionReturnType<boolean> => {
const data = await auth();

if (!data || !data.user || !data.user.id) {
return { error: "Not authenticated", success: false };
}

const user = await db
.update(users)
.set({ telegramId: decipher(telegramUser) })
.where(eq(users.id, data.user.id))
.execute();

if (!user) {
return {
success: false,
data: false,
error: "Failed to link telegram to user",
};
}

return {
success: true,
data: true,
};
};
Loading