Skip to content

Commit

Permalink
[v2.4.9] Add openrouter support (logancyang#234)
Browse files Browse the repository at this point in the history
  • Loading branch information
logancyang committed Jan 12, 2024
1 parent 0c5e75e commit 195c7de
Show file tree
Hide file tree
Showing 10 changed files with 161 additions and 67 deletions.
2 changes: 1 addition & 1 deletion manifest.json
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
{
"id": "copilot",
"name": "Copilot",
"version": "2.4.8",
"version": "2.4.9",
"minAppVersion": "0.15.0",
"description": "A ChatGPT Copilot in Obsidian.",
"author": "Logan Yang",
Expand Down
4 changes: 2 additions & 2 deletions package-lock.json

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

2 changes: 1 addition & 1 deletion package.json
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
{
"name": "obsidian-copilot",
"version": "2.4.8",
"version": "2.4.9",
"description": "ChatGPT integration for Obsidian",
"main": "main.js",
"scripts": {
Expand Down
56 changes: 56 additions & 0 deletions src/aiParams.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,56 @@
import { ChainType } from '@/chainFactory';
import { ChatPromptTemplate } from "langchain/prompts";

/**
 * Per-model configuration assembled before constructing a chat model instance.
 *
 * Only the fields relevant to the selected vendor are populated; the rest
 * stay undefined (see the vendor switch in aiState.ts that builds this).
 */
export interface ModelConfig {
  /** Vendor-specific model identifier (e.g. an OpenAI or OpenRouter model name). */
  modelName: string;
  /** Sampling temperature passed through to the model. */
  temperature: number;
  /** Whether responses are streamed token-by-token. */
  streaming: boolean;
  /** Maximum retry attempts for failed requests. */
  maxRetries: number;
  /** Maximum number of concurrent requests. */
  maxConcurrency: number;
  /** Optional cap on generated tokens; omitted to use the vendor default. */
  maxTokens?: number;
  openAIApiKey?: string;
  anthropicApiKey?: string;
  azureOpenAIApiKey?: string;
  azureOpenAIApiInstanceName?: string;
  azureOpenAIApiDeploymentName?: string;
  azureOpenAIApiVersion?: string;
  // Google and TogetherAI API key share this property
  apiKey?: string;
  /** Base URL for an OpenAI-compatible proxy (also used for OpenRouter). */
  openAIProxyBaseUrl?: string;
  ollamaModel?: string;
  openRouterModel?: string;
  lmStudioPort?: string;
}

/**
 * Full parameter bundle handed to AIState, derived from plugin settings
 * (see getAIStateParams in main.ts). Holds every vendor credential plus the
 * currently selected model and chain configuration.
 */
export interface LangChainParams {
  openAIApiKey: string;
  huggingfaceApiKey: string;
  cohereApiKey: string;
  anthropicApiKey: string;
  azureOpenAIApiKey: string;
  azureOpenAIApiInstanceName: string;
  azureOpenAIApiDeploymentName: string;
  azureOpenAIApiVersion: string;
  azureOpenAIApiEmbeddingDeploymentName: string;
  googleApiKey: string;
  openRouterAiApiKey: string;
  /** Vendor-specific model identifier currently in use. */
  model: string;
  /** Human-readable name shown in the model dropdown. */
  modelDisplayName: string;
  temperature: number;
  maxTokens: number;
  systemMessage: string;
  /** Number of past conversation turns kept as chat context. */
  chatContextTurns: number;
  embeddingProvider: string;
  chainType: ChainType; // Default ChainType is set in main.ts getAIStateParams
  options: SetChainOptions;
  ollamaModel: string;
  openRouterModel: string;
  lmStudioPort: string;
  /** Base URL for an OpenAI-compatible proxy, when configured. */
  openAIProxyBaseUrl?: string;
}

/**
 * Optional knobs applied when (re)building a LangChain chain.
 * All fields may be omitted, in which case defaults apply.
 */
export interface SetChainOptions {
  /** Prompt template to use in place of the default one. */
  prompt?: ChatPromptTemplate;
  /** Note text to make available to the chain — presumably the active note; confirm with callers. */
  noteContent?: string;
  /** When true, a fresh chain is created even if one already exists. */
  forceNewCreation?: boolean;
}
96 changes: 38 additions & 58 deletions src/aiState.ts
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,8 @@ import {
OLLAMA_MODELS,
OPENAI,
OPENAI_MODELS,
OPENROUTERAI,
OPENROUTERAI_MODELS,
USER_SENDER,
} from '@/constants';
import { ChatMessage } from '@/sharedState';
Expand Down Expand Up @@ -53,60 +55,9 @@ import { RecursiveCharacterTextSplitter } from "langchain/text_splitter";
import { MemoryVectorStore } from "langchain/vectorstores/memory";
import { Notice } from 'obsidian';
import { useState } from 'react';
import { LangChainParams, ModelConfig, SetChainOptions } from './aiParams';
import { ProxyChatOpenAI, ProxyOpenAIEmbeddings } from './langchainWrappers';


interface ModelConfig {
modelName: string,
temperature: number,
streaming: boolean,
maxRetries: number,
maxConcurrency: number,
maxTokens?: number,
openAIApiKey?: string,
anthropicApiKey?: string,
azureOpenAIApiKey?: string,
azureOpenAIApiInstanceName?: string,
azureOpenAIApiDeploymentName?: string,
azureOpenAIApiVersion?: string,
// Google API key https://api.js.langchain.com/classes/langchain_google_genai.ChatGoogleGenerativeAI.html
apiKey?: string,
openAIProxyBaseUrl?: string,
ollamaModel?: string,
lmStudioPort?: string,
}

export interface LangChainParams {
openAIApiKey: string,
huggingfaceApiKey: string,
cohereApiKey: string,
anthropicApiKey: string,
azureOpenAIApiKey: string,
azureOpenAIApiInstanceName: string,
azureOpenAIApiDeploymentName: string,
azureOpenAIApiVersion: string,
azureOpenAIApiEmbeddingDeploymentName: string,
googleApiKey: string,
model: string,
modelDisplayName: string,
temperature: number,
maxTokens: number,
systemMessage: string,
chatContextTurns: number,
embeddingProvider: string,
chainType: ChainType, // Default ChainType is set in main.ts getAIStateParams
options: SetChainOptions,
ollamaModel: string,
lmStudioPort: string,
openAIProxyBaseUrl?: string,
}

export interface SetChainOptions {
prompt?: ChatPromptTemplate;
noteContent?: string;
forceNewCreation?: boolean;
}

/**
* AIState manages the chat model, LangChain, and related state.
*
Expand Down Expand Up @@ -157,6 +108,7 @@ class AIState {
private vectorStore: MemoryVectorStore;

private static isOllamaModelActive = false;
private static isOpenRouterModelActive = false;

memory: BufferWindowMemory;
langChainParams: LangChainParams;
Expand Down Expand Up @@ -224,7 +176,9 @@ class AIState {
maxTokens,
openAIProxyBaseUrl,
googleApiKey,
openRouterAiApiKey,
ollamaModel,
openRouterModel,
} = this.langChainParams;

// Create a base configuration that applies to all models
Expand Down Expand Up @@ -267,6 +221,14 @@ class AIState {
apiKey: googleApiKey,
};
break;
case OPENROUTERAI:
config = {
...config,
modelName: openRouterModel,
openAIApiKey: openRouterAiApiKey,
openAIProxyBaseUrl: 'https://openrouter.ai/api/v1',
};
break;
case LM_STUDIO:
config = {
...config,
Expand Down Expand Up @@ -331,6 +293,14 @@ class AIState {
};
}

for (const modelDisplayNameKey of OPENROUTERAI_MODELS) {
modelMap[modelDisplayNameKey] = {
hasApiKey: Boolean(this.langChainParams.openRouterAiApiKey),
AIConstructor: ProxyChatOpenAI,
vendor: OPENROUTERAI,
};
}

for (const modelDisplayNameKey of OLLAMA_MODELS) {
modelMap[modelDisplayNameKey] = {
hasApiKey: true,
Expand Down Expand Up @@ -459,6 +429,9 @@ class AIState {
case GOOGLE:
AIState.chatGoogleGenerativeAI = newModelInstance as ChatGoogleGenerativeAI;
break;
case OPENROUTERAI:
AIState.chatOpenAI = newModelInstance as ProxyChatOpenAI;
break;
case OLLAMA:
AIState.chatOllama = newModelInstance as ChatOllama;
break;
Expand All @@ -473,15 +446,20 @@ class AIState {

setModel(newModelDisplayName: string): void {
AIState.isOllamaModelActive = newModelDisplayName === ChatModelDisplayNames.OLLAMA;
AIState.isOpenRouterModelActive = newModelDisplayName === ChatModelDisplayNames.OPENROUTERAI;
// model and model display name must be updated at the same time!
let newModel = getModelName(newModelDisplayName);

if (newModelDisplayName === ChatModelDisplayNames.OLLAMA) {
newModel = this.langChainParams.ollamaModel;
}

if (newModelDisplayName === ChatModelDisplayNames.LM_STUDIO) {
newModel = 'check_model_in_lm_studio_ui';
switch (newModelDisplayName) {
case ChatModelDisplayNames.OLLAMA:
newModel = this.langChainParams.ollamaModel;
break;
case ChatModelDisplayNames.LM_STUDIO:
newModel = 'check_model_in_lm_studio_ui';
break;
case ChatModelDisplayNames.OPENROUTERAI:
newModel = this.langChainParams.openRouterModel;
break;
}

try {
Expand Down Expand Up @@ -534,6 +512,8 @@ class AIState {
// setChain is async, this is to ensure Ollama has the right model passed in from the setting
if (AIState.isOllamaModelActive) {
(AIState.chatModel as ChatOllama).model = this.langChainParams.ollamaModel;
} else if (AIState.isOpenRouterModelActive) {
(AIState.chatModel as ProxyChatOpenAI).modelName = this.langChainParams.openRouterModel;
}

AIState.chain = ChainFactory.createNewLLMChain({
Expand Down
3 changes: 2 additions & 1 deletion src/components/ChatComponents/ChatIcons.tsx
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
import { SetChainOptions } from '@/aiState';
import { SetChainOptions } from '@/aiParams';
import {
AI_SENDER,
ChatModelDisplayNames,
Expand Down Expand Up @@ -117,6 +117,7 @@ const ChatIcons: React.FC<ChatIconsProps> = ({
<option value={ChatModelDisplayNames.AZURE_GPT_4}>{ChatModelDisplayNames.AZURE_GPT_4}</option>
<option value={ChatModelDisplayNames.AZURE_GPT_4_32K}>{ChatModelDisplayNames.AZURE_GPT_4_32K}</option>
<option value={ChatModelDisplayNames.GEMINI_PRO}>{ChatModelDisplayNames.GEMINI_PRO}</option>
<option value={ChatModelDisplayNames.OPENROUTERAI}>{ChatModelDisplayNames.OPENROUTERAI}</option>
<option value={ChatModelDisplayNames.LM_STUDIO}>{ChatModelDisplayNames.LM_STUDIO}</option>
<option value={ChatModelDisplayNames.OLLAMA}>{ChatModelDisplayNames.OLLAMA}</option>
</select>
Expand Down
9 changes: 9 additions & 0 deletions src/constants.ts
Original file line number Diff line number Diff line change
Expand Up @@ -36,6 +36,7 @@ export enum ChatModelDisplayNames {
AZURE_GPT_4 = 'AZURE GPT-4',
AZURE_GPT_4_32K = 'AZURE GPT-4 32K',
GEMINI_PRO = 'GEMINI PRO',
OPENROUTERAI = 'OPENROUTER.AI',
OLLAMA = 'OLLAMA (LOCAL)',
LM_STUDIO = 'LM STUDIO (LOCAL)',
}
Expand Down Expand Up @@ -67,6 +68,10 @@ export const GOOGLE_MODELS = new Set([
ChatModelDisplayNames.GEMINI_PRO,
]);

// Display names routed through OpenRouter; keep in sync with ChatModelDisplayNames.
export const OPENROUTERAI_MODELS = new Set([
  ChatModelDisplayNames.OPENROUTERAI,
]);

export const OLLAMA_MODELS = new Set([
ChatModelDisplayNames.OLLAMA,
]);
Expand Down Expand Up @@ -99,6 +104,7 @@ export const COHEREAI = 'cohereai';
export const AZURE_OPENAI = 'azure_openai';
export const ANTHROPIC = 'anthropic';
export const GOOGLE = 'google';
export const OPENROUTERAI = 'openrouterai';
export const LM_STUDIO = 'lm_studio';
export const OLLAMA = 'ollama';

Expand All @@ -107,6 +113,7 @@ export const VENDOR_MODELS: Record<string, Set<string>> = {
[AZURE_OPENAI]: AZURE_MODELS,
[ANTHROPIC]: CLAUDE_MODELS,
[GOOGLE]: GOOGLE_MODELS,
[OPENROUTERAI]: OPENROUTERAI_MODELS,
[OLLAMA]: OLLAMA_MODELS,
[LM_STUDIO]: LM_STUDIO_MODELS,
};
Expand All @@ -127,6 +134,8 @@ export const DEFAULT_SETTINGS: CopilotSettings = {
azureOpenAIApiVersion: '',
azureOpenAIApiEmbeddingDeploymentName: '',
googleApiKey: '',
openRouterAiApiKey: '',
openRouterModel: 'cognitivecomputations/dolphin-mixtral-8x7b',
defaultModel: ChatModels.GPT_4_TURBO,
defaultModelDisplayName: ChatModelDisplayNames.GPT_4_TURBO,
temperature: 0.1,
Expand Down
9 changes: 8 additions & 1 deletion src/main.ts
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
import AIState, { LangChainParams, SetChainOptions } from '@/aiState';
import { LangChainParams, SetChainOptions } from '@/aiParams';
import AIState from '@/aiState';
import { ChainType } from '@/chainFactory';
import { AddPromptModal } from "@/components/AddPromptModal";
import CopilotView from '@/components/CopilotView';
Expand Down Expand Up @@ -28,6 +29,8 @@ export interface CopilotSettings {
azureOpenAIApiVersion: string;
azureOpenAIApiEmbeddingDeploymentName: string;
googleApiKey: string;
openRouterAiApiKey: string;
openRouterModel: string;
defaultModel: string;
defaultModelDisplayName: string;
temperature: number;
Expand Down Expand Up @@ -485,6 +488,8 @@ export default class CopilotPlugin extends Plugin {
azureOpenAIApiVersion,
azureOpenAIApiEmbeddingDeploymentName,
googleApiKey,
openRouterAiApiKey,
openRouterModel,
temperature,
maxTokens,
contextTurns,
Expand All @@ -503,6 +508,8 @@ export default class CopilotPlugin extends Plugin {
azureOpenAIApiVersion,
azureOpenAIApiEmbeddingDeploymentName,
googleApiKey,
openRouterAiApiKey,
openRouterModel: openRouterModel || DEFAULT_SETTINGS.openRouterModel,
ollamaModel: ollamaModel || DEFAULT_SETTINGS.ollamaModel,
lmStudioPort: lmStudioPort || DEFAULT_SETTINGS.lmStudioPort,
model: this.settings.defaultModel,
Expand Down
Loading

0 comments on commit 195c7de

Please sign in to comment.