Implementing Efficient Large File Chunked Upload in .NET 8

Author: Microi风闲

Large file uploads are a common requirement in today's web applications, especially for cloud storage, video sites, and enterprise document management systems. Traditional single-request uploads run into trouble with large files: unstable networks, high memory usage, and having to re-upload the entire file after a failure. This article shows how to implement an efficient, reliable chunked upload solution for large files in .NET 8.

1. Advantages of Chunked Upload

  1. Better upload reliability: if a single chunk fails, only that chunk is re-uploaded, not the whole file
  2. Lower memory usage: only a small part of the file is processed at a time, so the whole file never has to be loaded into memory
  3. Resumable uploads: already-uploaded chunks are recorded, so an interrupted upload can continue from where it stopped
  4. Parallel uploads: several chunks can be uploaded at the same time to increase throughput
  5. Progress reporting: upload progress can be shown precisely, improving the user experience

2. Chunked Upload Implementation in .NET 8

2.1 Front-End Implementation (JavaScript)

// File selection handler
document.getElementById('fileInput').addEventListener('change', async function(e) {
    const file = e.target.files[0];
    if (!file) return;

    const chunkSize = 5 * 1024 * 1024; // 5 MB per chunk
    const totalChunks = Math.ceil(file.size / chunkSize);
    // generateFileId is a helper you provide, e.g. a hash of name + size + lastModified
    const fileId = generateFileId(file.name, file.size); // generate a unique file ID

    // Parallel upload control (limits how many chunks are in flight at once)
    const parallelLimit = 3;
    let currentChunk = 0;
    let activeUploads = 0;
    let uploadedChunks = 0;
    const failedChunks = [];

    while (currentChunk < totalChunks || activeUploads > 0) {
        if (activeUploads < parallelLimit && currentChunk < totalChunks) {
            const chunkNumber = currentChunk++;
            const chunkStart = chunkNumber * chunkSize;
            const chunkEnd = Math.min(file.size, chunkStart + chunkSize);
            const chunk = file.slice(chunkStart, chunkEnd);

            activeUploads++;
            // Start the upload without awaiting it, so up to parallelLimit chunks run concurrently
            uploadChunk(fileId, chunkNumber, chunk, totalChunks, file.name)
                .then(() => {
                    uploadedChunks++;
                    updateProgress(uploadedChunks / totalChunks * 100);
                })
                .catch(error => {
                    console.error(`Chunk ${chunkNumber} failed to upload:`, error);
                    failedChunks.push(chunkNumber); // retry logic can be added here
                })
                .finally(() => {
                    activeUploads--;
                });
        } else {
            // Wait briefly for an in-flight upload to finish
            await new Promise(resolve => setTimeout(resolve, 100));
        }
    }

    if (failedChunks.length > 0) {
        console.error('Some chunks failed to upload:', failedChunks);
        return; // do not merge an incomplete file
    }

    // All chunks uploaded; ask the server to merge them
    // notifyServerToMerge is a helper that POSTs { fileId, fileName, totalChunks } to /api/upload/merge
    await notifyServerToMerge(fileId, file.name, totalChunks);
    console.log('File upload complete');
});

async function uploadChunk(fileId, chunkNumber, chunkData, totalChunks, fileName) {
    const formData = new FormData();
    formData.append('fileId', fileId);
    formData.append('chunkNumber', chunkNumber);
    formData.append('totalChunks', totalChunks);
    formData.append('fileName', fileName);
    formData.append('chunk', chunkData);

    const response = await fetch('/api/upload/chunk', {
        method: 'POST',
        body: formData
    });

    if (!response.ok) {
        throw new Error('Chunk upload failed');
    }
}

function updateProgress(percent) {
    console.log(`Upload progress: ${percent.toFixed(2)}%`);
    // Update the UI progress bar
    document.getElementById('progressBar').style.width = `${percent}%`;
}

2.2 Back-End Implementation (.NET 8 Web API)

Controller code

[ApiController]
[Route("api/[controller]")]
public class UploadController : ControllerBase
{
    private readonly IFileUploadService _uploadService;
    private readonly ILogger<UploadController> _logger;

    public UploadController(IFileUploadService uploadService, ILogger<UploadController> logger)
    {
        _uploadService = uploadService;
        _logger = logger;
    }

    [HttpPost("chunk")]
    [DisableRequestSizeLimit] // Disable the per-request size limit for this action
    public async Task<IActionResult> UploadChunk()
    {
        try
        {
            var form = await Request.ReadFormAsync();
            var chunk = form.Files["chunk"];
            
            if (chunk == null || chunk.Length == 0)
                return BadRequest("Invalid chunk data");

            var fileId = form["fileId"].ToString();
            var chunkNumber = int.Parse(form["chunkNumber"].ToString());
            var totalChunks = int.Parse(form["totalChunks"].ToString());
            var fileName = form["fileName"].ToString();

            await _uploadService.SaveChunkAsync(fileId, chunkNumber, totalChunks, fileName, chunk);

            return Ok(new { chunkNumber, fileId });
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "分片上传失败");
            return StatusCode(500, $"分片上传失败: {ex.Message}");
        }
    }

    [HttpPost("merge")]
    public async Task<IActionResult> MergeChunks([FromBody] MergeRequest request)
    {
        try
        {
            var filePath = await _uploadService.MergeChunksAsync(request.FileId, request.FileName, request.TotalChunks);
            return Ok(new { filePath });
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "分片合并失败");
            return StatusCode(500, $"分片合并失败: {ex.Message}");
        }
    }
}

public record MergeRequest(string FileId, string FileName, int TotalChunks);
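
For reference, the front end's notifyServerToMerge helper only needs to POST a JSON body that matches MergeRequest. With ASP.NET Core's default JSON binding, a request of the following shape works (the values are purely illustrative):

POST /api/upload/merge
Content-Type: application/json

{
  "fileId": "a1b2c3d4",
  "fileName": "demo-video.mp4",
  "totalChunks": 42
}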

File upload service implementation

public interface IFileUploadService
{
    Task SaveChunkAsync(string fileId, int chunkNumber, int totalChunks, string fileName, IFormFile chunk);
    Task<string> MergeChunksAsync(string fileId, string fileName, int totalChunks);
}

public class FileUploadService : IFileUploadService
{
    private readonly string _uploadPath;
    private readonly ILogger<FileUploadService> _logger;

    public FileUploadService(IConfiguration configuration, ILogger<FileUploadService> logger)
    {
        _uploadPath = configuration["FileUpload:Path"] ?? Path.Combine(Directory.GetCurrentDirectory(), "Uploads");
        _logger = logger;
        
        if (!Directory.Exists(_uploadPath))
        {
            Directory.CreateDirectory(_uploadPath);
        }
    }

    public async Task SaveChunkAsync(string fileId, int chunkNumber, int totalChunks, string fileName, IFormFile chunk)
    {
        // Create a temporary directory per file
        var tempDir = Path.Combine(_uploadPath, fileId);
        if (!Directory.Exists(tempDir))
        {
            Directory.CreateDirectory(tempDir);
        }

        var chunkPath = Path.Combine(tempDir, $"{chunkNumber}.part");
        
        // Write via a file stream to keep memory usage low
        await using var stream = new FileStream(chunkPath, FileMode.Create);
        await chunk.CopyToAsync(stream);
        
        _logger.LogInformation("Saved chunk {ChunkNumber}/{TotalChunks} for file ID {FileId}", 
            chunkNumber, totalChunks, fileId);
    }

    public async Task<string> MergeChunksAsync(string fileId, string fileName, int totalChunks)
    {
        var tempDir = Path.Combine(_uploadPath, fileId);
        if (!Directory.Exists(tempDir))
        {
            throw new DirectoryNotFoundException($"Temporary directory does not exist: {tempDir}");
        }

        // Verify that every chunk is present
        for (int i = 0; i < totalChunks; i++)
        {
            var chunkPath = Path.Combine(tempDir, $"{i}.part");
            if (!System.IO.File.Exists(chunkPath))
            {
                throw new FileNotFoundException($"Chunk {i} is missing", chunkPath);
            }
        }

        // Final file path
        var filePath = Path.Combine(_uploadPath, $"{fileId}_{fileName}");
        
        // Merge the chunks in order
        await using var outputStream = new FileStream(filePath, FileMode.Create);
        for (int i = 0; i < totalChunks; i++)
        {
            var chunkPath = Path.Combine(tempDir, $"{i}.part");
            await using var chunkStream = new FileStream(chunkPath, FileMode.Open);
            await chunkStream.CopyToAsync(outputStream);
            
            _logger.LogDebug("Merged chunk {ChunkNumber}/{TotalChunks}", i, totalChunks);
        }

        // Delete the temporary chunks
        try
        {
            Directory.Delete(tempDir, true);
            _logger.LogInformation("Merge complete; temporary directory deleted: {TempDir}", tempDir);
        }
        catch (Exception ex)
        {
            _logger.LogWarning(ex, "Failed to delete temporary directory: {TempDir}", tempDir);
        }

        return filePath;
    }
}

2.3 Configuration and Service Registration

Register the service and add the configuration in Program.cs:

var builder = WebApplication.CreateBuilder(args);

// Register services
builder.Services.AddControllers();
builder.Services.AddScoped<IFileUploadService, FileUploadService>();

// Optional: bind the FileUpload section to a strongly typed options class
// (a FileUploadOptions class is not shown in this article; FileUploadService reads IConfiguration directly)
// builder.Services.Configure<FileUploadOptions>(builder.Configuration.GetSection("FileUpload"));

var app = builder.Build();

// Serve uploaded files as static content (only needed if downloads are required)
// Requires: using Microsoft.Extensions.FileProviders;
var uploadsPath = Path.Combine(builder.Environment.ContentRootPath, "Uploads");
Directory.CreateDirectory(uploadsPath); // PhysicalFileProvider requires an existing directory
app.UseStaticFiles(new StaticFileOptions
{
    FileProvider = new PhysicalFileProvider(uploadsPath),
    RequestPath = "/uploads"
});

app.MapControllers();
app.Run();
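
A note on request size limits: [DisableRequestSizeLimit] lifts Kestrel's per-request body limit for that action, but the multipart form reader keeps its own default limit (about 128 MB per multipart body). With 5 MB chunks the defaults are more than enough; if you move to much larger chunks, the limits can also be raised globally in Program.cs, for example:

// Optional: raise the global limits when using very large chunks.
// Requires: using Microsoft.AspNetCore.Http.Features;
builder.WebHost.ConfigureKestrel(options =>
{
    // Kestrel's default cap is roughly 30 MB per request
    options.Limits.MaxRequestBodySize = 200 * 1024 * 1024;
});
builder.Services.Configure<FormOptions>(options =>
{
    options.MultipartBodyLengthLimit = 200 * 1024 * 1024;
});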

Add the configuration to appsettings.json:

{
  "FileUpload": {
    "Path": "Uploads",
    "MaxFileSize": "1073741824" // 1GB
  }
}
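
Note that MaxFileSize above is only configuration; none of the code shown so far reads or enforces it. A minimal sketch of one way to enforce it inside FileUploadService follows (the _maxFileSize field and the EnsureWithinSizeLimit helper are illustrative additions, not part of the original service):

// Hypothetical additions to FileUploadService for enforcing FileUpload:MaxFileSize.
// In the constructor:
//     _maxFileSize = long.TryParse(configuration["FileUpload:MaxFileSize"], out var max)
//         ? max : 1_073_741_824; // default to 1 GB
private readonly long _maxFileSize;

// Call this from MergeChunksAsync after verifying that all chunks exist.
private void EnsureWithinSizeLimit(string tempDir)
{
    long totalSize = Directory
        .GetFiles(tempDir, "*.part")
        .Sum(f => new FileInfo(f).Length);

    if (totalSize > _maxFileSize)
    {
        Directory.Delete(tempDir, true); // discard the oversized upload
        throw new InvalidOperationException(
            $"Upload of {totalSize} bytes exceeds the configured limit of {_maxFileSize} bytes.");
    }
}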

3. Advanced Features

3.1 Resumable Uploads

// Added to UploadController; _uploadPath here is the same upload root the service uses
// (for example injected via IConfiguration), or the lookup can be delegated to IFileUploadService
[HttpGet("check")]
public IActionResult CheckChunks(string fileId, int totalChunks)
{
    var tempDir = Path.Combine(_uploadPath, fileId);
    if (!Directory.Exists(tempDir))
    {
        return Ok(new { uploadedChunks = Array.Empty<int>() });
    }

    var uploaded = Directory.GetFiles(tempDir)
        .Select(f => Path.GetFileNameWithoutExtension(f))
        .Where(f => int.TryParse(f, out _))
        .Select(int.Parse)
        .ToArray();

    return Ok(new { uploadedChunks = uploaded });
}

Corresponding front-end changes:

// Before uploading, ask the server which chunks it already has
const checkResponse = await fetch(`/api/upload/check?fileId=${fileId}&totalChunks=${totalChunks}`);
const { uploadedChunks: alreadyUploaded } = await checkResponse.json();

// Skip chunks that were uploaded in a previous session
let uploadedChunks = alreadyUploaded.length;
updateProgress(uploadedChunks / totalChunks * 100);

while (currentChunk < totalChunks) {
    if (alreadyUploaded.includes(currentChunk)) {
        currentChunk++;
        continue;
    }
    // ...original upload logic
}

3.2 File Integrity Verification (MD5/SHA)

// Requires: using System.Security.Cryptography;
public async Task<string> CalculateFileHash(string filePath)
{
    await using var stream = System.IO.File.OpenRead(filePath);
    using var sha256 = SHA256.Create();
    var hashBytes = await sha256.ComputeHashAsync(stream);
    return BitConverter.ToString(hashBytes).Replace("-", "").ToLowerInvariant();
}

// After the merge completes, verify the file against a hash supplied by the client
var calculatedHash = await CalculateFileHash(filePath);
if (calculatedHash != expectedHash)
{
    System.IO.File.Delete(filePath);
    throw new Exception("File verification failed; the file may have been corrupted during upload");
}
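
The expectedHash value has to come from the client. One possible way to carry it is an optional field on the merge request; the ExpectedHash property and the controller snippet below are illustrative extensions, not part of the original contract:

// Hypothetical extension of the merge contract (ExpectedHash is optional).
public record MergeRequest(string FileId, string FileName, int TotalChunks, string? ExpectedHash = null);

// Inside the merge endpoint, after MergeChunksAsync has produced filePath:
if (!string.IsNullOrEmpty(request.ExpectedHash))
{
    var calculatedHash = await CalculateFileHash(filePath);
    if (!string.Equals(calculatedHash, request.ExpectedHash, StringComparison.OrdinalIgnoreCase))
    {
        System.IO.File.Delete(filePath);
        return StatusCode(500, "File verification failed; the upload may be corrupted");
    }
}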

3.3 Dynamic Chunk Size Adjustment

Adjust the chunk size based on current network conditions. Note that the client computes totalChunks and each chunk's offset from a single chunkSize value, so a newly calculated size should only take effect for the next upload (or the chunk bookkeeping must be reworked accordingly):

// Dynamically adjusted chunk size
let chunkSize = 1 * 1024 * 1024; // start at 1 MB
let uploadSpeeds = [];

async function uploadChunk(fileId, chunkNumber, chunkData, totalChunks, fileName) {
    const startTime = performance.now();
    // ...upload logic as before
    const endTime = performance.now();
    const duration = (endTime - startTime) / 1000; // seconds
    const speed = chunkData.size / duration; // bytes per second
    
    uploadSpeeds.push(speed);
    if (uploadSpeeds.length > 5) {
        uploadSpeeds.shift(); // keep a moving window of the last 5 samples
    }
    
    const avgSpeed = uploadSpeeds.reduce((sum, val) => sum + val, 0) / uploadSpeeds.length;
    
    // Adjust the chunk size from the average speed (target: 5-15 seconds per chunk)
    const targetChunkTime = 10; // 10 seconds
    chunkSize = Math.min(
        50 * 1024 * 1024, // at most 50 MB
        Math.max(
            1 * 1024 * 1024, // at least 1 MB
            Math.round(avgSpeed * targetChunkTime)
        )
    );
}

4. Performance Optimization and Security Considerations

  1. Performance optimization

    • Stream data instead of buffering entire files
    • Limit and control parallel uploads
    • Adjust the chunk size dynamically
    • Keep memory usage under control
  2. Security considerations (see the sketch after this list for file name validation)

    • File type checks
    • File size limits
    • Virus scanning integration
    • Access control and permission checks
    • File name validation and sanitization
  3. Error handling

    • Retries after network interruptions
    • Per-chunk verification
    • Timeout handling
    • Concurrency conflict handling
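
Because the merge step builds the final path from the client-supplied file name, file name validation deserves particular attention. Below is a minimal sketch under the assumption that stripping directory components and whitelisting a few extensions is enough for your scenario; the FileNameValidator class is an illustrative addition, not part of the earlier listings:

// Illustrative helper for validating a client-supplied file name before it is used in a path.
public static class FileNameValidator
{
    private static readonly HashSet<string> AllowedExtensions = new(StringComparer.OrdinalIgnoreCase)
    {
        ".mp4", ".zip", ".pdf", ".docx" // adjust to your own requirements
    };

    public static string Sanitize(string fileName)
    {
        // Drop any directory components (guards against traversal attempts such as "../../evil.sh")
        var name = Path.GetFileName(fileName);

        if (string.IsNullOrWhiteSpace(name) || name.IndexOfAny(Path.GetInvalidFileNameChars()) >= 0)
            throw new ArgumentException("Invalid file name", nameof(fileName));

        var extension = Path.GetExtension(name);
        if (!AllowedExtensions.Contains(extension))
            throw new ArgumentException($"File type '{extension}' is not allowed", nameof(fileName));

        return name;
    }
}

MergeChunksAsync could then call FileNameValidator.Sanitize(fileName) before building the final path.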

5. Testing Recommendations

  1. Unit tests (see the example after this list):

    • Chunk saving and merging
    • File verification logic
    • Exception handling
  2. Integration tests:

    • The complete upload flow
    • Resume-after-interruption scenarios
    • Unstable network conditions
  3. Performance tests:

    • Upload time for different file sizes
    • Concurrent upload tests
    • Memory usage monitoring
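
As an example of the unit tests mentioned above, here is a hedged sketch of a test for chunk saving and merging. It assumes the xUnit framework and builds the service against a throwaway directory with an in-memory configuration; adapt the details to your own test setup:

using Microsoft.AspNetCore.Http;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.Logging.Abstractions;
using Xunit;

public class FileUploadServiceTests
{
    [Fact]
    public async Task SaveAndMerge_ReassemblesOriginalContent()
    {
        // Arrange: point the service at a temporary upload root
        var tempRoot = Path.Combine(Path.GetTempPath(), Guid.NewGuid().ToString("N"));
        var configuration = new ConfigurationBuilder()
            .AddInMemoryCollection(new Dictionary<string, string?> { ["FileUpload:Path"] = tempRoot })
            .Build();
        var service = new FileUploadService(configuration, NullLogger<FileUploadService>.Instance);

        var fileId = "test-file";
        byte[][] chunks = { "Hello, "u8.ToArray(), "chunked "u8.ToArray(), "world!"u8.ToArray() };

        // Act: save each chunk, then merge them
        for (int i = 0; i < chunks.Length; i++)
        {
            using var stream = new MemoryStream(chunks[i]);
            IFormFile formFile = new FormFile(stream, 0, stream.Length, "chunk", $"{i}.part");
            await service.SaveChunkAsync(fileId, i, chunks.Length, "greeting.txt", formFile);
        }
        var mergedPath = await service.MergeChunksAsync(fileId, "greeting.txt", chunks.Length);

        // Assert: the merged file contains the original bytes in order
        Assert.Equal("Hello, chunked world!", await File.ReadAllTextAsync(mergedPath));

        Directory.Delete(tempRoot, true); // clean up
    }
}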

6. Summary

This article has walked through a complete large file chunked upload solution for .NET 8, covering client-side chunking, server-side chunk receiving and merging, and advanced features such as resumable uploads. The solution has the following characteristics:

  1. Efficient and reliable, well suited to large file upload scenarios
  2. Low memory usage thanks to streaming
  3. Resumable uploads for a better user experience
  4. Easy to extend with file verification, virus scanning, and other features

With sensible configuration and tuning, this approach can meet the large file upload requirements of enterprise applications and give users a smooth, reliable upload experience.
