Deploying DeepSeek locally and calling it from Java and Python, step by step

Author: Most666

This article walks through downloading and running a model with Ollama, installing JDK 17 or above with Spring Boot 3.3.6, configuring the pom file and application.yml, creating a Controller, and finally calling the model from Python. Readers who need this setup can use it as a reference.

1. Download Ollama

(Accessing the site may require a VPN in mainland China.)

https://ollama.com/

2. Pull a model

Run the command:

ollama pull deepseek-v3

Because v3 is far too large to run locally, switch to r1 instead:

ollama run deepseek-r1:1.5b

List the installed models:

ollama ls

List the running models:

ollama ps

Type /bye at the chat prompt to exit.
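Ollama also exposes a local HTTP API on port 11434, which is what both the Java and Python clients below ultimately talk to. As a quick sanity check that the server and model are up, here is a minimal sketch using the JDK's built-in HttpClient; the /api/generate endpoint with model/prompt/stream fields is Ollama's documented REST API, while the class name is just for illustration:

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class OllamaPing {
    public static void main(String[] args) throws Exception {
        // One-shot (non-streaming) request against Ollama's local REST API
        String body = """
                {"model": "deepseek-r1:1.5b", "prompt": "hello", "stream": false}""";
        HttpRequest request = HttpRequest.newBuilder()
                .uri(URI.create("http://localhost:11434/api/generate"))
                .header("Content-Type", "application/json")
                .POST(HttpRequest.BodyPublishers.ofString(body))
                .build();
        HttpResponse<String> response = HttpClient.newHttpClient()
                .send(request, HttpResponse.BodyHandlers.ofString());
        // The model's answer is in the "response" field of the returned JSON
        System.out.println(response.body());
    }
}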

3. Calling from Java

This setup currently requires JDK 17 or above; this article uses JDK 21 with Spring Boot 3.3.6. A Spring Boot version that is either too high or too low will fail to start.

3.1 Set up the pom

Note: the io.springboot.ai coordinates below are a community republication of the Spring AI Ollama starter on Maven Central, not the official org.springframework.ai artifacts.

<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <parent>
        <groupId>org.springframework.boot</groupId>
        <artifactId>spring-boot-starter-parent</artifactId>
        <version>3.3.6</version>
        <relativePath/> <!-- lookup parent from repository -->
    </parent>
    <groupId>com.example</groupId>
    <artifactId>demo21</artifactId>
    <version>0.0.1-SNAPSHOT</version>
    <name>demo21</name>
    <description>demo21</description>

    <properties>
        <java.version>21</java.version>
    </properties>
    <dependencies>
        <dependency>
            <groupId>io.springboot.ai</groupId>
            <artifactId>spring-ai-ollama-spring-boot-starter</artifactId>
            <version>1.0.3</version>
        </dependency>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-web</artifactId>
        </dependency>

        <dependency>
            <groupId>org.projectlombok</groupId>
            <artifactId>lombok</artifactId>
            <optional>true</optional>
        </dependency>

    </dependencies>

    <build>
        <plugins>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-compiler-plugin</artifactId>
                <configuration>
                    <annotationProcessorPaths>
                        <path>
                            <groupId>org.projectlombok</groupId>
                            <artifactId>lombok</artifactId>
                        </path>
                    </annotationProcessorPaths>
                </configuration>
            </plugin>
            <plugin>
                <groupId>org.springframework.boot</groupId>
                <artifactId>spring-boot-maven-plugin</artifactId>
                <configuration>
                    <excludes>
                        <exclude>
                            <groupId>org.projectlombok</groupId>
                            <artifactId>lombok</artifactId>
                        </exclude>
                    </excludes>
                </configuration>
            </plugin>
        </plugins>
    </build>

</project>

3.2 Configure application.yml

Here base-url points at the local Ollama server (11434 is Ollama's default port), and model must match the tag pulled in step 2.

server:
  port: 8088
spring:
  application:
    name: demo21
  ai:
    ollama:
      base-url: http://localhost:11434
      chat:
        options:
          model: deepseek-r1:1.5b

3.3 Create a Controller

import org.springframework.ai.chat.ChatResponse;
import org.springframework.ai.chat.prompt.Prompt;
import org.springframework.ai.ollama.OllamaChatClient;
import org.springframework.ai.ollama.api.OllamaOptions;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;

@RestController
public class OllamaClientController {

    @Autowired
    @Qualifier("ollamaChatClient")
    private OllamaChatClient ollamaChatClient;

    /**
     * http://localhost:8088/ollama/chat/v1?msg=java就业前景
     */
    @GetMapping("/ollama/chat/v1")
    public String ollamaChat(@RequestParam String msg) {
        return this.ollamaChatClient.call(msg);
    }

    /**
     * http://localhost:8088/ollama/chat/v2?msg=java就业前景
     */
    @GetMapping("/ollama/chat/v2")
    public Object ollamaChatV2(@RequestParam String msg) {
        Prompt prompt = new Prompt(msg);
        ChatResponse chatResponse = ollamaChatClient.call(prompt);
        return chatResponse.getResult().getOutput().getContent();
    }

    /**
     * http://localhost:8088/ollama/chat/v3?msg=java就业前景
     */
    @GetMapping("/ollama/chat/v3")
    public Object ollamaChatV3(@RequestParam String msg) {
        Prompt prompt = new Prompt(
                msg,
                OllamaOptions.create()
                        .withModel("deepseek-r1:1.5b")
                        .withTemperature(0.4F));
        ChatResponse chatResponse = ollamaChatClient.call(prompt);
        return chatResponse.getResult().getOutput().getContent();
    }
}
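The starter is based on Spring AI 0.8.x, where OllamaChatClient also implements StreamingChatClient, so responses can be streamed token by token instead of waiting for the full answer. Below is a hedged sketch of such an endpoint, added to the controller above; it assumes stream(Prompt) returns a Flux<ChatResponse> and that reactor-core is on the classpath (pulled in by the starter):

import org.springframework.http.MediaType;
import reactor.core.publisher.Flux;

    /**
     * Streams tokens to the client as server-sent events.
     * http://localhost:8088/ollama/chat/v4?msg=java就业前景
     */
    @GetMapping(value = "/ollama/chat/v4", produces = MediaType.TEXT_EVENT_STREAM_VALUE)
    public Flux<String> ollamaChatStream(@RequestParam String msg) {
        return ollamaChatClient.stream(new Prompt(msg))
                // guard against chunks without content (e.g. the final "done" chunk)
                .mapNotNull(resp -> resp.getResult() == null
                        ? null
                        : resp.getResult().getOutput().getContent());
    }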

4. Calling from Python

Install the package with pip:

pip install ollama

Create a .py file:

import ollama

# Streaming output
def api_generate(text: str):
    print(f'Question: {text}')

    stream = ollama.generate(
        stream=True,
        model='deepseek-r1:1.5b',
        prompt=text,
    )

    print('-----------------------------------------')
    for chunk in stream:
        if not chunk['done']:
            print(chunk['response'], end='', flush=True)
        else:
            print('\n')
            print('-----------------------------------------')
            # total_duration is reported in nanoseconds; note the outer double
            # quotes -- nesting single quotes inside a single-quoted f-string
            # is a syntax error before Python 3.12
            print(f"Total time: {chunk['total_duration']}")
            print('-----------------------------------------')

def api_chat(text: str):
    print(f'Question: {text}')

    stream = ollama.chat(
        stream=True,
        model='deepseek-r1:1.5b',
        messages=[{"role": "user", "content": text}]
    )

    print('-----------------------------------------')
    for chunk in stream:
        if not chunk['done']:
            # chat chunks nest the text under message -> content
            print(chunk['message']['content'], end='', flush=True)
        else:
            print('\n')
            print('-----------------------------------------')
            # total_duration is reported in nanoseconds
            print(f"Total time: {chunk['total_duration']}")
            print('-----------------------------------------')

if __name__ == '__main__':
    # Streaming output
    api_generate(text='python就业前景')

    api_chat(text='python就业前景')

    # Non-streaming output: extract just the text; the full response object
    # also carries timing and token metadata
    content = ollama.generate(model='deepseek-r1:1.5b', prompt='python就业前景')
    print(content['response'])

    content = ollama.chat(model='deepseek-r1:1.5b', messages=[{"role": "user", "content": 'python就业前景'}])
    print(content['message']['content'])

Summary

This concludes the walkthrough of deploying DeepSeek locally and calling it from Java and Python. For more on this topic, please search 脚本之家 (jb51.net) for earlier articles, and thank you for your continued support of 脚本之家!
