{"payload":{"pageCount":1,"repositories":[{"type":"Public","name":"nextjs-fastapi-agent-boilerplate-agentgpt","owner":"lablab-ai","isFork":true,"description":"🤖 Assemble, configure, and deploy autonomous AI Agents in your browser.","allTopics":[],"primaryLanguage":{"name":"TypeScript","color":"#3178c6"},"pullRequestCount":0,"issueCount":0,"starsCount":6,"forksCount":9155,"license":"GNU General Public License v3.0","participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2023-08-17T00:53:57.336Z"}},{"type":"Public","name":"virtual-agent-town-ts-starter-kit","owner":"lablab-ai","isFork":true,"description":"A MIT-licensed, deployable starter kit for building and customizing your own version of AI town - a virtual town where AI characters live, chat and socialize.","allTopics":[],"primaryLanguage":{"name":"TypeScript","color":"#3178c6"},"pullRequestCount":0,"issueCount":0,"starsCount":0,"forksCount":649,"license":"MIT License","participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2023-08-16T23:19:19.316Z"}},{"type":"Public","name":"webgpu-llm-simple-chat-starter","owner":"lablab-ai","isFork":false,"description":"This repo provides a complete implementation of a simple chat app based on WebLLM.","allTopics":[],"primaryLanguage":{"name":"TypeScript","color":"#3178c6"},"pullRequestCount":1,"issueCount":1,"starsCount":2,"forksCount":4,"license":null,"participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-02-05T15:52:50.712Z"}},{"type":"Public","name":"webgpu-llm-nextjs-simple-chat-starter","owner":"lablab-ai","isFork":false,"description":"This is a Next.js project using web-llm.","allTopics":[],"primaryLanguage":{"name":"TypeScript","color":"#3178c6"},"pullRequestCount":0,"issueCount":0,"starsCount":1,"forksCount":1,"license":null,"participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2023-08-10T12:20:53.130Z"}},{"type":"Public","name":"get-started-with-webgpu-llm-starter","owner":"lablab-ai","isFork":false,"description":"This repo provides a minimum demo to show WebLLM API in a webapp setting. To try it out, you can do the following steps under this folder","allTopics":[],"primaryLanguage":{"name":"TypeScript","color":"#3178c6"},"pullRequestCount":0,"issueCount":0,"starsCount":0,"forksCount":1,"license":null,"participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2023-08-10T12:20:46.142Z"}}],"repositoryCount":5,"userInfo":null,"searchable":true,"definitions":[],"typeFilters":[{"id":"all","text":"All"},{"id":"public","text":"Public"},{"id":"source","text":"Sources"},{"id":"fork","text":"Forks"},{"id":"archived","text":"Archived"},{"id":"template","text":"Templates"}],"compactMode":false},"title":"Repositories"}