Skip to content

Commit 48a565f

Browse files
committed
#9 start changing the Ollama provider to use the Ollama JS SDK
1 parent 841080a commit 48a565f

File tree

6 files changed

+482
-2
lines changed

6 files changed

+482
-2
lines changed

package.json

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -88,6 +88,7 @@
8888
"mermaid": "^11.4.1",
8989
"mime-types": "^2.1.35",
9090
"nanoid": "^5.0.9",
91+
"ollama": "^0.5.14",
9192
"openai": "^4.85.1",
9293
"pdf-parse-new": "^1.3.9",
9394
"pinia": "^3.0.1",

src/main/events.ts

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -43,3 +43,8 @@ export const WINDOW_EVENTS = {
4343
READY_TO_SHOW: 'window:ready-to-show', // 替代 main-window-ready-to-show
4444
FORCE_QUIT_APP: 'window:force-quit-app' // 替代 force-quit-app
4545
}
46+
47+
// ollama 相关事件
48+
export const OLLAMA_EVENTS = {
49+
PULL_MODEL_PROGRESS: 'ollama:pull-model-progress'
50+
}

src/main/presenter/llmProviderPresenter/index.ts

Lines changed: 75 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,10 @@
1-
import { ILlmProviderPresenter, LLM_PROVIDER, LLMResponse, MODEL_META } from '@shared/presenter'
1+
import {
2+
ILlmProviderPresenter,
3+
LLM_PROVIDER,
4+
LLMResponse,
5+
MODEL_META,
6+
OllamaModel
7+
} from '@shared/presenter'
28
import { BaseLLMProvider } from './baseProvider'
39
import { OpenAIProvider } from './providers/openAIProvider'
410
import { DeepseekProvider } from './providers/deepseekProvider'
@@ -7,11 +13,13 @@ import { eventBus } from '@/eventbus'
713
import { OpenAICompatibleProvider } from './providers/openAICompatibleProvider'
814
import { PPIOProvider } from './providers/ppioProvider'
915
import { getModelConfig } from './modelConfigs'
10-
import { STREAM_EVENTS } from '@/events'
16+
import { OLLAMA_EVENTS, STREAM_EVENTS } from '@/events'
1117
import { ConfigPresenter } from '../configPresenter'
1218
import { GeminiProvider } from './providers/geminiProvider'
1319
import { DEFAULT_PROVIDERS } from '../configPresenter/providers'
1420
import { GithubProvider } from './providers/githubProvider'
21+
import { OllamaProvider } from './providers/ollamaProvider'
22+
import { ShowResponse } from 'ollama'
1523
// 导入其他provider...
1624

1725
// 流的状态
@@ -130,6 +138,9 @@ export class LLMProviderPresenter implements ILlmProviderPresenter {
130138
instance = new GithubProvider(provider, this.configPresenter)
131139
break
132140
// 添加其他provider的实例化逻辑
141+
case 'ollama':
142+
instance = new OllamaProvider(provider, this.configPresenter)
143+
break
133144
default:
134145
instance = new OpenAICompatibleProvider(provider, this.configPresenter)
135146
break
@@ -438,4 +449,66 @@ export class LLMProviderPresenter implements ILlmProviderPresenter {
438449
const provider = this.getProviderInstance(providerId)
439450
return provider.summaryTitles(messages, modelId)
440451
}
452+
453+
// Resolve the currently active provider as an OllamaProvider.
// Returns null when no provider is active, when the active provider's
// apiType is not 'ollama', or when its instance is not an OllamaProvider.
getOllamaProviderInstance(): OllamaProvider | null {
  const id = this.currentProviderId
  if (!id) {
    return null
  }

  // The provider config must declare the 'ollama' apiType…
  const config = this.providers.get(id)
  if (config?.apiType !== 'ollama') {
    return null
  }

  // …and the live instance must actually be an OllamaProvider.
  const instance = this.providerInstances.get(id)
  return instance instanceof OllamaProvider ? instance : null
}
472+
// ollama api
473+
listOllamaModels(): Promise<OllamaModel[]> {
474+
const provider = this.getOllamaProviderInstance()
475+
if (!provider) {
476+
throw new Error('Ollama provider not found')
477+
}
478+
return provider.listModels()
479+
}
480+
showOllamaModelInfo(modelName: string): Promise<ShowResponse> {
481+
const provider = this.getOllamaProviderInstance()
482+
if (!provider) {
483+
throw new Error('Ollama provider not found')
484+
}
485+
return provider.showModelInfo(modelName)
486+
}
487+
listOllamaRunningModels(): Promise<OllamaModel[]> {
488+
const provider = this.getOllamaProviderInstance()
489+
if (!provider) {
490+
throw new Error('Ollama provider not found')
491+
}
492+
return provider.listRunningModels()
493+
}
494+
// Pulls (downloads) a model from the Ollama registry, forwarding every
// progress update to listeners via OLLAMA_EVENTS.PULL_MODEL_PROGRESS.
// Resolves to the boolean returned by the provider's pullModel.
// Throws when the active provider is not an Ollama provider.
// Fix: removed the leftover debug console.log that fired on every progress
// tick — model pulls are multi-gigabyte downloads, so it flooded the log.
pullOllamaModels(modelName: string): Promise<boolean> {
  const provider = this.getOllamaProviderInstance()
  if (!provider) {
    throw new Error('Ollama provider not found')
  }
  return provider.pullModel(modelName, (progress) => {
    // eventId tags the payload with its originating call site.
    eventBus.emit(OLLAMA_EVENTS.PULL_MODEL_PROGRESS, {
      eventId: 'pullOllamaModels',
      ...progress
    })
  })
}
507+
deleteOllamaModel(modelName: string): Promise<boolean> {
508+
const provider = this.getOllamaProviderInstance()
509+
if (!provider) {
510+
throw new Error('Ollama provider not found')
511+
}
512+
return provider.deleteModel(modelName)
513+
}
441514
}

0 commit comments

Comments
 (0)