Remove proxy server for LocalAI (logancyang#123)
* Remove LocalAI Proxy and use openAIProxyBaseURL

* Update LocalAI setup to include metal build

* Update documentation for LocalAI CORS setup
Sokole1 committed Aug 8, 2023
1 parent 1f945cf commit 3417043
Showing 6 changed files with 21 additions and 111 deletions.
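
In short, the bundled Koa proxy and its `useLocalProxy` toggle are gone; Copilot now talks to LocalAI directly through the existing `openAIProxyBaseUrl` setting. A minimal sketch of the two settings involved after this change (the URL and model name below are illustrative values, not taken from the commit):

```typescript
// Sketch only — illustrative values, not part of this commit.
// With the local proxy removed, the existing proxy setting points straight at the
// LocalAI server (started with CORS enabled), and localAIModel names the model to load.
const localCopilotSettings = {
  openAIProxyBaseUrl: 'http://localhost:8080/v1', // LocalAI's OpenAI-compatible endpoint
  localAIModel: 'ggml-gpt4all-j',                 // any model present in your LocalAI models directory
};
```
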
14 changes: 10 additions & 4 deletions localai_setup.md
@@ -21,10 +21,10 @@ git clone https://github.com/go-skynet/LocalAI.git
cd LocalAI

# build the binary
make build
make BUILD_TYPE=metal build

# Start localai with model gallery
GALLERIES='[{"name":"model-gallery", "url":"github:go-skynet/model-gallery/index.yaml"}, {"url": "github:go-skynet/model-gallery/huggingface.yaml","name":"huggingface"}]' ./local-ai --models-path ./models/ --debug
# Start localai with model gallery with CORS enabled
GALLERIES='[{"name":"model-gallery", "url":"github:go-skynet/model-gallery/index.yaml"}, {"url": "github:go-skynet/model-gallery/huggingface.yaml","name":"huggingface"}]' ./local-ai --models-path ./models/ --debug --cors
```

(Here's the original [localAI guide](https://localai.io/basics/build/#build-on-mac) I referred to. I added some more details for the errors I saw when I set it up on my M1 Macbook Air. Hope this helps!)
@@ -58,6 +58,12 @@ Set the `GALLERIES` env variable to load models from model-gallery. Edit `.env`
GALLERIES=[{"name":"model-gallery", "url":"github:go-skynet/model-gallery/index.yaml"}, {"url": "github:go-skynet/model-gallery/huggingface.yaml","name":"huggingface"}]
```

Also uncomment the `CORS` settings in `.env`

```
CORS=true
```

Then start the Docker container,

```bash
@@ -166,4 +172,4 @@ Now hopefully you should be able to turn off your internet and still have full C
## Caveats
This is still in early experimental phase. Local LLMs require some experience to set up and interact with. Please be careful with any large models and ensure you have enough storage and memory for them.

If you experience issues, feel free to reach out in the discussion section. If you believe the bug is with Copilot, please submit an issue. If you think the issue is with LocalAI itself, please open an issue there instead. Again, they have a great Discord community to help troubleshoot as well, consider joining there!
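
Because the plugin now calls LocalAI directly from Obsidian, the `--cors` flag and `CORS=true` setting documented above are what let those requests through. A quick sanity check that the server is up and speaking the OpenAI-compatible API, as a sketch assuming the default port 8080:

```typescript
// Sketch only: ping LocalAI's OpenAI-compatible models route.
// Assumes LocalAI was started with CORS enabled as described above.
async function checkLocalAI(baseUrl = 'http://localhost:8080/v1'): Promise<void> {
  const res = await fetch(`${baseUrl}/models`);
  if (!res.ok) {
    throw new Error(`LocalAI not reachable: ${res.status} ${res.statusText}`);
  }
  const { data } = await res.json();
  console.log('Available LocalAI models:', data?.map((m: { id: string }) => m.id));
}
```
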
15 changes: 7 additions & 8 deletions src/aiState.ts
@@ -63,7 +63,6 @@ interface ModelConfig {
azureOpenAIApiDeploymentName?: string,
azureOpenAIApiVersion?: string,
openAIProxyBaseUrl?: string,
useLocalProxy?: boolean,
localAIModel?: string,
}

@@ -87,7 +86,6 @@ export interface LangChainParams {
chainType: ChainType, // Default ChainType is set in main.ts getAIStateParams
options: SetChainOptions,
openAIProxyBaseUrl?: string,
useLocalProxy?: boolean,
localAIModel?: string,
}

@@ -171,7 +169,6 @@ class AIState {
temperature,
maxTokens,
openAIProxyBaseUrl,
useLocalProxy,
localAIModel,
} = this.langChainParams;

@@ -191,7 +188,6 @@ openAIApiKey,
openAIApiKey,
maxTokens,
openAIProxyBaseUrl,
useLocalProxy,
localAIModel,
};
break;
@@ -265,7 +261,6 @@ azureOpenAIApiVersion,
azureOpenAIApiVersion,
azureOpenAIApiEmbeddingDeploymentName,
openAIProxyBaseUrl,
useLocalProxy,
} = this.langChainParams;

const OpenAIEmbeddingsAPI = new OpenAIEmbeddings({
@@ -307,7 +302,6 @@ return new ProxyOpenAIEmbeddings({
return new ProxyOpenAIEmbeddings({
openAIApiKey,
openAIProxyBaseUrl,
useLocalProxy,
maxRetries: 3,
maxConcurrency: 3,
timeout: 10000,
@@ -364,14 +358,19 @@ class AIState {
setModel(newModelDisplayName: string): void {
// model and model display name must be updated at the same time!
let newModel = getModelName(newModelDisplayName);
const {useLocalProxy, localAIModel} = this.langChainParams;
const {localAIModel} = this.langChainParams;

if (newModelDisplayName === ChatModelDisplayNames.LOCAL_AI && useLocalProxy) {
if (newModelDisplayName === ChatModelDisplayNames.LOCAL_AI) {
if (!localAIModel) {
new Notice('No local AI model provided! Please set it in settings first.');
console.error('No local AI model provided! Please set it in settings first.');
return;
}
if (!this.langChainParams.openAIProxyBaseUrl) {
new Notice('Please set the OpenAI Proxy Base URL in settings.');
console.error('Please set the OpenAI Proxy Base URL in settings.');
return;
}
newModel = localAIModel;
}
this.langChainParams.model = newModel;
4 changes: 0 additions & 4 deletions src/constants.ts
@@ -90,9 +90,6 @@ export const DISTILBERT_NLI = 'sentence-transformers/distilbert-base-nli-mean-to
export const INSTRUCTOR_XL = 'hkunlp/instructor-xl'; // Inference API is off for this
export const MPNET_V2 = 'sentence-transformers/all-mpnet-base-v2'; // Inference API returns 400

// Proxy parameters
export const PROXY_SERVER_PORT = 3001;
export const LOCALAI_URL = 'http://localhost:8080/v1'; // LocalAI server
// export const LOCALAI_DEFAULT_MODEL = 'ggml-gpt4all-j';

export const DEFAULT_SETTINGS: CopilotSettings = {
@@ -113,7 +110,6 @@ export const DEFAULT_SETTINGS: CopilotSettings = {
useNotesAsContext: false,
userSystemPrompt: '',
openAIProxyBaseUrl: '',
useLocalProxy: false,
localAIModel: '',
stream: true,
embeddingProvider: OPENAI,
10 changes: 2 additions & 8 deletions src/langchainWrappers.ts
@@ -8,10 +8,8 @@ export class ProxyChatOpenAI extends ChatOpenAI {
) {
super(fields ?? {});

const modelName = fields.useLocalProxy ? fields.localAIModel : fields.modelName;
if (fields.useLocalProxy) {
console.log('Using local proxy, LocalAI model: ', modelName);
}
// Use LocalAIModel if it is set
const modelName = fields.localAIModel ? fields.localAIModel : fields.modelName;

const clientConfig = new Configuration({
...this["clientConfig"],
@@ -30,10 +28,6 @@ export class ProxyOpenAIEmbeddings extends OpenAIEmbeddings {
) {
super(fields ?? {});

if (fields.useLocalProxy) {
console.log('Using local proxy, LocalAI embedding. ');
}

const clientConfig = new Configuration({
...this["clientConfig"],
basePath: fields.openAIProxyBaseUrl,
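
With `useLocalProxy` removed, the wrapper above decides purely from its fields: `localAIModel`, when set, wins over `modelName`, and `openAIProxyBaseUrl` becomes the OpenAI client's base path. A usage sketch of the class defined above, with placeholder values:

```typescript
// Sketch only — placeholder values. LocalAI ignores the API key, but LangChain expects one.
const localChat = new ProxyChatOpenAI({
  openAIApiKey: 'sk-placeholder',
  modelName: 'gpt-3.5-turbo',                     // ignored because localAIModel is set
  localAIModel: 'ggml-gpt4all-j',                 // resolved by the ternary above
  openAIProxyBaseUrl: 'http://localhost:8080/v1', // used as the client's basePath
});
```
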
66 changes: 0 additions & 66 deletions src/main.ts
@@ -7,8 +7,6 @@ import { ListPromptModal } from "@/components/ListPromptModal";
import { ToneModal } from "@/components/ToneModal";
import {
CHAT_VIEWTYPE, DEFAULT_SETTINGS, DEFAULT_SYSTEM_PROMPT,
LOCALAI_URL,
PROXY_SERVER_PORT
} from '@/constants';
import { CopilotSettingTab } from '@/settings';
import SharedState from '@/sharedState';
@@ -40,7 +38,6 @@ export interface CopilotSettings {
useNotesAsContext: boolean;
userSystemPrompt: string;
openAIProxyBaseUrl: string;
useLocalProxy: boolean;
localAIModel: string;
stream: boolean;
embeddingProvider: string;
@@ -72,12 +69,6 @@ export default class CopilotPlugin extends Plugin {
// Always have one instance of sharedState and aiState in the plugin
this.sharedState = new SharedState();
const langChainParams = this.getAIStateParams();
if (this.settings.useLocalProxy) {
// If using local proxy, 3rd party proxy is overridden
langChainParams.openAIProxyBaseUrl = `http://localhost:${PROXY_SERVER_PORT}`;
langChainParams.useLocalProxy = true;
await this.startProxyServer(LOCALAI_URL);
}
this.aiState = new AIState(langChainParams);

this.dbPrompts = new PouchDB<CustomPrompt>('copilot_custom_prompts');
@@ -377,10 +368,6 @@ });
});
}

async onunload() {
await this.stopProxyServer();
}

processSelection(editor: Editor, eventType: string, eventSubtype?: string) {
if (editor.somethingSelected() === false) {
new Notice('Please select some text to rewrite.');
@@ -498,57 +485,4 @@ openAIProxyBaseUrl: this.settings.openAIProxyBaseUrl,
openAIProxyBaseUrl: this.settings.openAIProxyBaseUrl,
};
}

async startProxyServer(proxyBaseUrl: string) {
console.log('loading plugin');
// check if the port is already in use
const inUse = await this.checkPortInUse(PROXY_SERVER_PORT);

if (!inUse) {
// Create a new Koa application
const app = new Koa();

app.use(cors());

// Create and apply the proxy middleware
app.use(proxy('/', {
// your target API, e.g. http://localhost:8080 for LocalAI
target: proxyBaseUrl,
changeOrigin: true,
}));

// Start the server on the specified port
this.server = app.listen(PROXY_SERVER_PORT);
console.log(`Proxy server running on http://localhost:${PROXY_SERVER_PORT}`);
} else {
console.error(`Port ${PROXY_SERVER_PORT} is in use`);
}
}

async stopProxyServer() {
console.log('stopping proxy server...');
if (this.server) {
this.server.close();
}
}

checkPortInUse(port: number) {
return new Promise((resolve, reject) => {
const server = net.createServer()
.once('error', (err: NodeJS.ErrnoException) => { // Typecast here
if (err.code === 'EADDRINUSE') {
resolve(true); // Port is in use
} else {
reject(err);
}
})
.once('listening', () => {
server.once('close', () => {
resolve(false); // Port is not in use
})
.close();
})
.listen(port);
});
}
}
23 changes: 2 additions & 21 deletions src/settings.ts
@@ -400,32 +400,13 @@ export class CopilotSettingTab extends PluginSettingTab {
containerEl.createEl('h6', { text: 'To use Local Copilot, please check the doc/demo video to set up LocalAI server on your device. Once ready, switch on the toggle below, type in the LocalAI Model name you have, and pick LocalAI in the Copilot Chat model selection dropdown to chat with it!' });
containerEl.createEl('h6', { text: 'Local models can be limited in capabilities and may not work for some use cases at this time. Keep in mind that it is still in early experimental phase. But it is definitely fun to try out!' });

new Setting(containerEl)
.setName("Use Local Copilot")
.setDesc(
createFragment((frag) => {
frag.appendText("Toggle this switch to launch a local proxy server. If this is on, 3rd-party proxy in Advanced Setting is overridden.");
frag.createEl('br');
frag.createEl(
'strong',
{ text: "Plugin restart required." }
);
})
)
.addToggle((toggle) => {
toggle
.setValue(this.plugin.settings.useLocalProxy)
.onChange(async (value) => {
this.plugin.settings.useLocalProxy = value;
await this.plugin.saveSettings();
});
});

new Setting(containerEl)
.setName("LocalAI Model")
.setDesc(
createFragment((frag) => {
frag.appendText("The local model you'd like to use. Make sure you download that model in your LocalAI models directory.");
frag.createEl('br');
frag.appendText("NOTE: Please set OpenAI Proxy Base URL to http://localhost:8080/v1 under Advanced Settings")
})
)
.addText((text) => {