A SpringBoot-Based Goods Management System for the Dongyan Handbag Factory (东燕手袋厂) [Java graduation project / SpringBoot practical project / Vue project / latest Java graduation projects]


💖💖 Author: 计算机毕业设计小途 💙💙 About me: I have long worked in computer science training and genuinely enjoy teaching. My main languages are Java, WeChat Mini Program, Python, Golang, and Android, and my projects span big data, deep learning, websites, mini programs, Android apps, and algorithms. I regularly take on custom project development, code walkthroughs, thesis-defense coaching, and documentation writing, and I also know a few techniques for reducing similarity in plagiarism checks. I enjoy sharing fixes for problems I run into during development and talking about technology, so feel free to ask me about code or technical issues! 💛💛 A quick note: thank you all for your attention and support! 💜💜 Website projects · Android/mini-program projects · Big data projects · Deep learning projects


Introduction to the SpringBoot-Based Dongyan Handbag Factory Goods Management System

The SpringBoot-based Dongyan Handbag Factory goods management system is an integrated information platform designed to streamline the core business processes of a handbag manufacturer. It follows the common B/S architecture and MVC pattern: the backend is built on SpringBoot (integrating Spring, Spring MVC, and MyBatis) and provides the service layer that handles business logic, data access, and permission control, while the frontend is built with Vue.js and the ElementUI component library, giving a clear, responsive interface in a fully separated front-end/back-end setup. MySQL serves as the relational data store, keeping the data consistent and secure.

Functionally, the system covers the key steps from raw-material management to production coordination. The Fabric Information and Fabric Inbound/Outbound modules give fine-grained control over fabric stock and track inventory changes in real time; the Handbag Category and Handbag Information modules maintain a complete product archive; and the Production Task and Labor Settlement modules link production order dispatch to labor cost accounting, improving coordination on the shop floor. A Personal Center and a System Management module round the system out, handling role-based permissions for users such as production workers and warehouse keepers. Taken together, the system is both a representative SpringBoot + Vue full-stack exercise and a graduation project example with clear business logic that maps onto real factory needs.
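
To make the front-end/back-end separation concrete, here is a minimal sketch of what a REST endpoint for the fabric inbound module could look like. The controller name, URL paths, and the plain-string response are illustrative assumptions rather than the project's actual code; it simply delegates to a service like the FabricInventoryService shown in the code showcase below, and the Vue/ElementUI frontend would call such an endpoint over HTTP and render the returned data.

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;

// Hypothetical controller: the Vue frontend posts JSON to these endpoints,
// and the controller hands the request to the service layer.
@RestController
@RequestMapping("/api/fabric")
public class FabricInboundController {

    @Autowired
    private FabricInventoryService fabricInventoryService;

    // POST /api/fabric/inbound with a JSON body bound to FabricInboundRecord
    @PostMapping("/inbound")
    public String inbound(@RequestBody FabricInboundRecord record) {
        fabricInventoryService.processFabricInbound(record);
        return "ok"; // a real project would usually return a unified result wrapper instead
    }
}

On the Vue side, a form component would typically submit this JSON with axios and refresh the fabric inventory table from the response.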

Demo Video of the SpringBoot-Based Dongyan Handbag Factory Goods Management System

Demo video

Screenshots of the SpringBoot-Based Dongyan Handbag Factory Goods Management System

布料出库.png (fabric outbound)

布料入库.png (fabric inbound)

布料信息.png (fabric information)

仓管员信息.png (warehouse keeper information)

工费结算.png (labor cost settlement)

生产任务.png (production task)

生产员信息.png (production worker information)

手袋信息.png (handbag information)

Code Showcase for the SpringBoot-Based Dongyan Handbag Factory Goods Management System

import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;

// Note: each public @Service class in this showcase belongs in its own .java file; they are listed together here for brevity.
@Service
public class FabricInventoryService {
    @Autowired
    private FabricMapper fabricMapper;
    // Local SparkSession used for lightweight in-service statistics (the same pattern is reused in the services below)
    private SparkSession spark = SparkSession.builder().appName("FabricAnalysis").master("local[*]").getOrCreate();
    // Fabric inbound: validate the quantity, increase stock, write an inbound log, then run a quick Spark summary of the last 7 days
    public void processFabricInbound(FabricInboundRecord record) {
        if (record.getQuantity() <= 0) { throw new RuntimeException("入库数量必须大于0"); }
        Fabric existingFabric = fabricMapper.selectById(record.getFabricId());
        if (existingFabric == null) { throw new RuntimeException("布料不存在"); }
        int currentStock = existingFabric.getCurrentStock();
        int newStock = currentStock + record.getQuantity();
        existingFabric.setCurrentStock(newStock);
        fabricMapper.updateById(existingFabric);
        FabricInboundLog log = new FabricInboundLog();
        log.setFabricId(record.getFabricId());
        log.setQuantity(record.getQuantity());
        log.setOperator(record.getOperator());
        log.setInboundTime(new Date());
        fabricMapper.insertInboundLog(log);
        List<FabricInboundLog> recentLogs = fabricMapper.selectInboundLogsLast7Days();
        Dataset<Row> df = spark.createDataFrame(recentLogs, FabricInboundLog.class);
        df.groupBy("fabricId").sum("quantity").show();
    }
    // Fabric outbound: check stock, deduct it, log the issue against a production task, then summarize outbound amounts with Spark
    public void processFabricOutbound(FabricOutboundRequest request) {
        Fabric fabric = fabricMapper.selectById(request.getFabricId());
        if (fabric == null) { throw new RuntimeException("布料不存在"); }
        if (fabric.getCurrentStock() < request.getAmount()) { throw new RuntimeException("库存不足"); }
        fabric.setCurrentStock(fabric.getCurrentStock() - request.getAmount());
        fabricMapper.updateById(fabric);
        FabricOutboundLog outboundLog = new FabricOutboundLog();
        outboundLog.setFabricId(request.getFabricId());
        outboundLog.setAmount(request.getAmount());
        outboundLog.setProductionTaskId(request.getProductionTaskId());
        outboundLog.setOutboundTime(new Date());
        fabricMapper.insertOutboundLog(outboundLog);
        List<FabricOutboundLog> outboundLogs = fabricMapper.selectOutboundLogsByTask(request.getProductionTaskId());
        Dataset<Row> outboundDf = spark.createDataFrame(outboundLogs, FabricOutboundLog.class);
        outboundDf.groupBy("fabricId").avg("amount").show();
    }
    // Aggregate average daily usage and total usage per fabric type with Spark SQL
    public void analyzeFabricUsageTrend() {
        List<FabricUsageDTO> usageList = fabricMapper.selectFabricUsageStats();
        Dataset<Row> usageDf = spark.createDataFrame(usageList, FabricUsageDTO.class);
        usageDf.createOrReplaceTempView("fabric_usage");
        Dataset<Row> result = spark.sql("SELECT fabricType, AVG(dailyUsage) as avgUsage, SUM(totalUsage) as total FROM fabric_usage GROUP BY fabricType");
        result.show();
        List<Row> results = result.collectAsList();
        results.forEach(row -> {
            System.out.println("布料类型: " + row.getString(0) + ", 平均日用量: " + row.getDouble(1) + ", 总用量: " + row.getLong(2));
        });
    }
}
@Service
public class ProductionTaskService {
    @Autowired
    private ProductionTaskMapper productionTaskMapper;
    private SparkSession spark = SparkSession.builder().appName("ProductionAnalysis").master("local[*]").getOrCreate();
    // Create a production task, persist its fabric requirements, then count pending tasks per bag model
    public void createAndAssignProductionTask(ProductionTask task) {
        if (task.getRequiredQuantity() <= 0) { throw new RuntimeException("生产数量必须大于0"); }
        task.setStatus("待生产");
        task.setCreateTime(new Date());
        productionTaskMapper.insert(task);
        List<FabricRequirement> requirements = calculateFabricRequirement(task.getBagModelId(), task.getRequiredQuantity());
        requirements.forEach(req -> {
            req.setTaskId(task.getId());
            productionTaskMapper.insertFabricRequirement(req);
        });
        List<ProductionTask> pendingTasks = productionTaskMapper.selectTasksByStatus("待生产");
        Dataset<Row> taskDf = spark.createDataFrame(pendingTasks, ProductionTask.class);
        taskDf.groupBy("bagModelId").count().show();
    }
    // Mark a task as completed, record actual output and efficiency, then report this month's average efficiency
    public void completeProductionTask(Integer taskId, Integer actualOutput) {
        ProductionTask task = productionTaskMapper.selectById(taskId);
        if (task == null) { throw new RuntimeException("生产任务不存在"); }
        if (actualOutput <= 0) { throw new RuntimeException("实际产量必须大于0"); }
        task.setActualOutput(actualOutput);
        task.setStatus("已完成");
        task.setCompletionTime(new Date());
        productionTaskMapper.updateById(task);
        double efficiency = (double) actualOutput / task.getRequiredQuantity() * 100;
        task.setProductionEfficiency(efficiency);
        productionTaskMapper.updateEfficiency(task);
        List<ProductionTask> completedTasks = productionTaskMapper.selectCompletedTasksThisMonth();
        Dataset<Row> completedDf = spark.createDataFrame(completedTasks, ProductionTask.class);
        completedDf.createOrReplaceTempView("production_tasks");
        Dataset<Row> efficiencyDf = spark.sql("SELECT AVG(productionEfficiency) as avgEfficiency FROM production_tasks WHERE productionEfficiency > 0");
        efficiencyDf.show();
    }
    // Derive a task's fabric requirement from the bag model's per-unit fabric consumption
    private List<FabricRequirement> calculateFabricRequirement(Integer bagModelId, Integer quantity) {
        BagModel model = productionTaskMapper.selectBagModelById(bagModelId);
        List<FabricRequirement> requirements = new ArrayList<>();
        if (model != null && model.getFabricConsumption() > 0) {
            FabricRequirement req = new FabricRequirement();
            req.setFabricTypeId(model.getMainFabricTypeId());
            req.setRequiredAmount(model.getFabricConsumption() * quantity);
            requirements.add(req);
        }
        return requirements;
    }
}
@Service
public class SettlementService {
    @Autowired
    private SettlementMapper settlementMapper;
    private SparkSession spark = SparkSession.builder().appName("SettlementAnalysis").master("local[*]").getOrCreate();
    // Labor settlement: cost = actual output * per-unit labor rate; only completed tasks can be settled
    public void calculateLaborCost(Integer taskId) {
        ProductionTask task = settlementMapper.selectTaskWithDetails(taskId);
        if (task == null) { throw new RuntimeException("生产任务不存在"); }
        if (!"已完成".equals(task.getStatus())) { throw new RuntimeException("只能对已完成的任务进行结算"); }
        double totalLaborCost = task.getActualOutput() * task.getLaborCostPerUnit();
        LaborSettlement settlement = new LaborSettlement();
        settlement.setTaskId(taskId);
        settlement.setTotalLaborCost(totalLaborCost);
        settlement.setSettlementTime(new Date());
        settlement.setStatus("已结算");
        settlementMapper.insert(settlement);
        task.setSettlementStatus("已结算");
        settlementMapper.updateTaskSettlementStatus(task);
        List<LaborSettlement> monthlySettlements = settlementMapper.selectSettlementsByMonth(2024, 6);
        Dataset<Row> df = spark.createDataFrame(monthlySettlements, LaborSettlement.class);
        df.createOrReplaceTempView("labor_settlements");
        Dataset<Row> result = spark.sql("SELECT SUM(totalLaborCost) as totalCost, AVG(totalLaborCost) as avgCost FROM labor_settlements");
        result.show();
    }
    // Summarize each worker's average daily output and total output with Spark SQL
    public void analyzeProductivity() {
        List<WorkerProductivity> productivityList = settlementMapper.selectWorkerProductivity();
        Dataset<Row> productivityDf = spark.createDataFrame(productivityList, WorkerProductivity.class);
        productivityDf.createOrReplaceTempView("worker_productivity");
        Dataset<Row> result = spark.sql("SELECT workerId, AVG(outputPerDay) as avgOutput, SUM(totalOutput) as totalOutput FROM worker_productivity GROUP BY workerId");
        List<Row> rows = result.collectAsList();
        rows.forEach(row -> {
            System.out.println("员工ID: " + row.getInt(0) + ", 平均日产量: " + row.getDouble(1) + ", 总产量: " + row.getLong(2));
        });
    }
}
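
The services above rely on MyBatis mapper interfaces such as FabricMapper, which the showcase does not include. The sketch below is a hedged guess at how that mapper could be declared: the method names mirror the calls made in FabricInventoryService, but the table and column names are assumptions, and the real project may well use XML mapper files instead of annotations.

import java.util.List;
import org.apache.ibatis.annotations.Insert;
import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Select;
import org.apache.ibatis.annotations.Update;

// Hypothetical MyBatis mapper matching the calls in FabricInventoryService above.
// Table and column names are assumed; adjust them to the actual MySQL schema.
@Mapper
public interface FabricMapper {

    @Select("SELECT * FROM fabric WHERE id = #{fabricId}")
    Fabric selectById(Integer fabricId);

    @Update("UPDATE fabric SET current_stock = #{currentStock} WHERE id = #{id}")
    int updateById(Fabric fabric);

    @Insert("INSERT INTO fabric_inbound_log(fabric_id, quantity, operator, inbound_time) VALUES(#{fabricId}, #{quantity}, #{operator}, #{inboundTime})")
    int insertInboundLog(FabricInboundLog log);

    @Insert("INSERT INTO fabric_outbound_log(fabric_id, amount, production_task_id, outbound_time) VALUES(#{fabricId}, #{amount}, #{productionTaskId}, #{outboundTime})")
    int insertOutboundLog(FabricOutboundLog log);

    @Select("SELECT * FROM fabric_inbound_log WHERE inbound_time >= DATE_SUB(NOW(), INTERVAL 7 DAY)")
    List<FabricInboundLog> selectInboundLogsLast7Days();

    @Select("SELECT * FROM fabric_outbound_log WHERE production_task_id = #{taskId}")
    List<FabricOutboundLog> selectOutboundLogsByTask(Integer taskId);

    @Select("SELECT fabric_type AS fabricType, daily_usage AS dailyUsage, total_usage AS totalUsage FROM fabric_usage_stats")
    List<FabricUsageDTO> selectFabricUsageStats();
}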

Documentation Showcase for the SpringBoot-Based Dongyan Handbag Factory Goods Management System

文档.png (thesis document)
