💖💖作者:计算机毕业设计江挽 💙💙个人简介:曾长期从事计算机专业培训教学,本人也热爱上课教学,语言擅长Java、微信小程序、Python、Golang、安卓Android等,开发项目包括大数据、深度学习、网站、小程序、安卓、算法。平常会做一些项目定制化开发、代码讲解、答辩教学、文档编写、也懂一些降重方面的技巧。平常喜欢分享一些自己开发中遇到的问题的解决办法,也喜欢交流技术,大家有技术代码这一块的问题可以问我! 💛💛想说的话:感谢大家的关注与支持! 💜💜 网站实战项目 安卓/小程序实战项目 大数据实战项目 深度学习实战项目
智慧农业管理系统介绍
智慧农业管理系统是一个基于Vue+SpringBoot技术架构的B/S结构农业数字化管理平台,采用MySQL数据库存储农业生产相关数据,通过ElementUI组件库构建美观实用的用户界面。该系统主要服务于现代农业生产管理需求,提供用户权限管理、大棚信息录入与维护、传感器设备监控等核心功能模块。系统后端采用SpringBoot框架搭建RESTful API接口,结合MyBatis持久层框架实现数据的增删改查操作,前端使用Vue.js响应式框架配合ElementUI组件快速构建交互友好的管理界面。整个系统支持多用户角色登录,能够实现农业生产信息的集中化管理,通过传感器数据采集功能帮助农业从业者实时掌握大棚内环境参数变化情况。系统设计遵循模块化开发思路,各功能模块相对独立又紧密协作,为农业生产管理提供了一套完整的数字化解决方案,有助于提升农业生产效率和管理水平。
智慧农业管理系统演示视频
智慧农业管理系统演示图片
智慧农业管理系统代码展示
import static org.apache.spark.sql.functions.avg;

import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
/**
 * REST controller exposing Spark-backed batch endpoints for the smart
 * agriculture platform: user batch processing, greenhouse environment
 * analysis, and real-time sensor data processing with alerting.
 *
 * <p>All endpoints accept a JSON list in the request body, run it through an
 * in-process Spark session, persist the derived rows via MyBatis mappers, and
 * return a {@code Map} payload with a {@code success} flag.
 *
 * <p>NOTE(review): {@code passwordEncoder}, {@code userMapper},
 * {@code greenhouseMapper} and {@code sensorMapper} are referenced below but
 * never declared in this class — they are presumably meant to be injected
 * (e.g. {@code @Autowired}) fields. Confirm and add the declarations.
 */
@RestController
@RequestMapping("/api")
public class AgricultureController {

    // Embedded in-process Spark session. NOTE(review): "local[*]" hard-codes a
    // local master inside a web controller; for any real deployment the master
    // URL and app settings should come from external configuration.
    private SparkSession spark = SparkSession.builder()
            .appName("AgricultureManagement")
            .master("local[*]")
            .config("spark.sql.adaptive.enabled", "true")
            .getOrCreate();

    /**
     * Averages {@code column} over {@code dataset}, returning 0.0 when the
     * dataset is empty. Spark's {@code avg} yields SQL NULL on an empty input,
     * so calling {@code getDouble(0)} directly (as the original code did)
     * throws a NullPointerException and turns a valid empty request into a
     * generic failure response.
     */
    private double safeAvg(Dataset<Row> dataset, String column) {
        Row row = dataset.agg(avg(column)).first();
        return row.isNullAt(0) ? 0.0 : row.getDouble(0);
    }

    /**
     * Batch-processes users: classifies each as adult/minor by age, drops rows
     * lacking username or password, encodes passwords, and inserts the rest.
     *
     * @param users raw user records from the request body
     * @return counts of processed/adult users plus a status message
     */
    @PostMapping("/user/batch-process")
    public ResponseEntity<Map<String, Object>> batchProcessUsers(@RequestBody List<User> users) {
        Map<String, Object> result = new HashMap<>();
        try {
            Dataset<Row> userDataset = spark.createDataFrame(users, User.class);
            userDataset.createOrReplaceTempView("user_temp");
            // Tag each row as 'adult' (age >= 18) or 'minor'.
            Dataset<Row> processedUsers = spark.sql(
                    "SELECT *, CASE WHEN age >= 18 THEN 'adult' ELSE 'minor' END as user_type FROM user_temp"
            );
            // Only rows with both credentials present are persisted.
            Dataset<Row> validUsers = processedUsers.filter("username IS NOT NULL AND password IS NOT NULL");
            long totalCount = validUsers.count();
            long adultCount = validUsers.filter("user_type = 'adult'").count();
            // Persist each valid user; passwords are stored encoded, never raw.
            for (Row row : validUsers.collectAsList()) {
                User user = new User();
                user.setUsername(row.getAs("username"));
                user.setPassword(passwordEncoder.encode(row.getAs("password")));
                user.setEmail(row.getAs("email"));
                user.setAge(row.getAs("age"));
                user.setUserType(row.getAs("user_type"));
                user.setCreateTime(new Date());
                userMapper.insertUser(user);
            }
            result.put("success", true);
            result.put("totalProcessed", totalCount);
            result.put("adultUsers", adultCount);
            result.put("message", "用户批量处理完成");
        } catch (Exception e) {
            result.put("success", false);
            result.put("message", "批量处理失败: " + e.getMessage());
        }
        return ResponseEntity.ok(result);
    }

    /**
     * Analyzes greenhouse readings: buckets temperature (&gt;30 high, &lt;15
     * low) and humidity (&gt;80 high, &lt;40 low), persists the per-greenhouse
     * classification, and reports abnormal counts and overall averages.
     *
     * @param greenhouses greenhouse snapshots from the request body
     * @return anomaly counts, average temperature/humidity, status message
     */
    @PostMapping("/greenhouse/analyze-data")
    public ResponseEntity<Map<String, Object>> analyzeGreenhouseData(@RequestBody List<Greenhouse> greenhouses) {
        Map<String, Object> result = new HashMap<>();
        try {
            Dataset<Row> greenhouseDataset = spark.createDataFrame(greenhouses, Greenhouse.class);
            greenhouseDataset.createOrReplaceTempView("greenhouse_temp");
            // Classify every greenhouse's temperature and humidity in one pass.
            Dataset<Row> analyzedData = spark.sql(
                    "SELECT area, temperature, humidity, " +
                    "CASE WHEN temperature > 30 THEN 'high_temp' " +
                    "WHEN temperature < 15 THEN 'low_temp' " +
                    "ELSE 'normal_temp' END as temp_status, " +
                    "CASE WHEN humidity > 80 THEN 'high_humidity' " +
                    "WHEN humidity < 40 THEN 'low_humidity' " +
                    "ELSE 'normal_humidity' END as humidity_status FROM greenhouse_temp"
            );
            long highTempCount = analyzedData.filter("temp_status = 'high_temp'").count();
            long lowTempCount = analyzedData.filter("temp_status = 'low_temp'").count();
            long abnormalHumidityCount = analyzedData.filter("humidity_status != 'normal_humidity'").count();
            // safeAvg guards against the empty-input NPE (avg of 0 rows is NULL).
            double avgTemperature = safeAvg(analyzedData, "temperature");
            double avgHumidity = safeAvg(analyzedData, "humidity");
            // Write the classification back for every greenhouse.
            for (Row row : analyzedData.collectAsList()) {
                Greenhouse greenhouse = new Greenhouse();
                greenhouse.setArea(row.getAs("area"));
                greenhouse.setTemperature(row.getAs("temperature"));
                greenhouse.setHumidity(row.getAs("humidity"));
                greenhouse.setTempStatus(row.getAs("temp_status"));
                greenhouse.setHumidityStatus(row.getAs("humidity_status"));
                greenhouse.setAnalyzeTime(new Date());
                greenhouseMapper.updateGreenhouseAnalysis(greenhouse);
            }
            result.put("success", true);
            result.put("highTempCount", highTempCount);
            result.put("lowTempCount", lowTempCount);
            result.put("abnormalHumidityCount", abnormalHumidityCount);
            result.put("avgTemperature", avgTemperature);
            result.put("avgHumidity", avgHumidity);
            result.put("message", "大棚数据分析完成");
        } catch (Exception e) {
            result.put("success", false);
            result.put("message", "数据分析失败: " + e.getMessage());
        }
        return ResponseEntity.ok(result);
    }

    /**
     * Processes a batch of sensor readings: flags alerts per sensor type
     * (temperature &gt; 35, humidity outside 30–90, light &lt; 1000), stores
     * one alert record per flagged reading plus every processed reading, and
     * reports per-type averages.
     *
     * @param sensorDataList raw sensor readings from the request body
     * @return alert count, per-type averages, total processed, status message
     */
    @PostMapping("/sensor/real-time-process")
    public ResponseEntity<Map<String, Object>> processRealTimeSensorData(@RequestBody List<SensorData> sensorDataList) {
        Map<String, Object> result = new HashMap<>();
        try {
            Dataset<Row> sensorDataset = spark.createDataFrame(sensorDataList, SensorData.class);
            sensorDataset.createOrReplaceTempView("sensor_temp");
            // Per-type alert thresholds evaluated in a single SQL pass.
            Dataset<Row> processedSensorData = spark.sql(
                    "SELECT sensor_id, sensor_type, sensor_value, timestamp, " +
                    "CASE WHEN sensor_type = 'temperature' AND sensor_value > 35 THEN 'alert' " +
                    "WHEN sensor_type = 'humidity' AND (sensor_value > 90 OR sensor_value < 30) THEN 'alert' " +
                    "WHEN sensor_type = 'light' AND sensor_value < 1000 THEN 'alert' " +
                    "ELSE 'normal' END as alert_status FROM sensor_temp"
            );
            Dataset<Row> alertSensors = processedSensorData.filter("alert_status = 'alert'");
            long alertCount = alertSensors.count();
            // safeAvg guards the empty case: a batch may contain none of a type.
            double avgTemperature = safeAvg(processedSensorData.filter("sensor_type = 'temperature'"), "sensor_value");
            double avgHumidity = safeAvg(processedSensorData.filter("sensor_type = 'humidity'"), "sensor_value");
            double avgLight = safeAvg(processedSensorData.filter("sensor_type = 'light'"), "sensor_value");
            // Persist one unprocessed alert record per flagged reading.
            for (Row row : alertSensors.collectAsList()) {
                SensorAlert alert = new SensorAlert();
                alert.setSensorId(row.getAs("sensor_id"));
                alert.setSensorType(row.getAs("sensor_type"));
                alert.setSensorValue(row.getAs("sensor_value"));
                alert.setAlertStatus(row.getAs("alert_status"));
                alert.setAlertTime(new Date());
                alert.setProcessed(false);
                sensorMapper.insertSensorAlert(alert);
            }
            // Persist every processed reading (alerting and normal alike).
            List<Row> allSensorData = processedSensorData.collectAsList();
            for (Row row : allSensorData) {
                SensorData data = new SensorData();
                data.setSensorId(row.getAs("sensor_id"));
                data.setSensorType(row.getAs("sensor_type"));
                data.setSensorValue(row.getAs("sensor_value"));
                data.setAlertStatus(row.getAs("alert_status"));
                data.setProcessTime(new Date());
                sensorMapper.insertProcessedSensorData(data);
            }
            result.put("success", true);
            result.put("alertCount", alertCount);
            result.put("avgTemperature", avgTemperature);
            result.put("avgHumidity", avgHumidity);
            result.put("avgLight", avgLight);
            result.put("totalProcessed", allSensorData.size());
            result.put("message", "传感器数据实时处理完成");
        } catch (Exception e) {
            result.put("success", false);
            result.put("message", "传感器数据处理失败: " + e.getMessage());
        }
        return ResponseEntity.ok(result);
    }
}
智慧农业管理系统文档展示
💖💖作者:计算机毕业设计江挽 💙💙个人简介:曾长期从事计算机专业培训教学,本人也热爱上课教学,语言擅长Java、微信小程序、Python、Golang、安卓Android等,开发项目包括大数据、深度学习、网站、小程序、安卓、算法。平常会做一些项目定制化开发、代码讲解、答辩教学、文档编写、也懂一些降重方面的技巧。平常喜欢分享一些自己开发中遇到的问题的解决办法,也喜欢交流技术,大家有技术代码这一块的问题可以问我! 💛💛想说的话:感谢大家的关注与支持! 💜💜 网站实战项目 安卓/小程序实战项目 大数据实战项目 深度学习实战项目