💖💖Author: 计算机毕业设计杰瑞 💙💙About me: I spent years teaching computer science training courses and still enjoy teaching. My main languages are Java, WeChat Mini Programs, Python, Golang, and Android, and my projects cover big data, deep learning, websites, mini programs, Android apps, and algorithms. I regularly take on custom project development, code walkthroughs, thesis-defense coaching, and documentation writing, and I know a few techniques for reducing similarity-check scores. I like sharing solutions to problems I hit during development and talking shop, so feel free to ask me anything about code! 💛💛A word of thanks: thank you all for your attention and support! 💜💜 Website projects | Android/mini-program projects | Big data projects | Deep learning projects | Recommended graduation project topics
Introduction to the Food Delivery Analysis and Visualization System
The Food Delivery Analysis and Visualization System is a comprehensive data analysis platform built on a big data stack, focused on mining and visualizing delivery efficiency, user behavior, and merchant operations in the food delivery industry. The system uses Hadoop + Spark as its big data processing core, Python with the Django framework on the back end, and a Vue + ElementUI + Echarts stack on the front end to build an interactive data dashboard. It covers the core functional modules of user management, food category management, food information maintenance, order delivery analysis, food data statistics, system administration, and order management. By running Spark SQL over large volumes of delivery order data in both real-time and batch modes, the system provides multi-dimensional insights such as delivery-time distributions, popular-dish rankings, regional order analysis, and courier efficiency evaluation. Pandas and NumPy handle data preprocessing, and the Echarts charting library renders dynamic visualizations, giving the delivery platform data support for operational decisions. The architecture emphasizes scalability and performance: HDFS provides distributed storage to keep the data safe, MySQL stores the structured business data, and together they form a complete analytics solution for food delivery operations.
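As a rough illustration of the HDFS storage and Pandas/NumPy preprocessing mentioned above, the sketch below loads raw order logs from HDFS with Spark, does a light cleaning pass in Pandas/NumPy, and writes the result back as Parquet for later Spark SQL analysis. The HDFS paths, column names, and value ranges are assumptions for illustration only, not the project's actual schema.
from pyspark.sql import SparkSession
import numpy as np

# Minimal preprocessing sketch; paths and column names are hypothetical
spark = SparkSession.builder.appName("订单数据预处理").getOrCreate()

# Read raw order logs from HDFS (assumed CSV layout with a header row)
raw_df = spark.read.option("header", "true").csv("hdfs://namenode:9000/takeaway/raw_orders/")

# Pull the data into Pandas for cleaning (assumes the raw extract fits in memory)
orders_pd = raw_df.toPandas()

# Drop rows missing key fields and coerce the amount column to numeric
orders_pd = orders_pd.dropna(subset=["order_id", "order_time", "total_amount"])
orders_pd["total_amount"] = orders_pd["total_amount"].astype(float)

# Clip obviously invalid amounts with NumPy (assumed valid range 0-1000)
orders_pd["total_amount"] = np.clip(orders_pd["total_amount"], 0, 1000)

# Write the cleaned data back to HDFS as Parquet for downstream analysis
cleaned_df = spark.createDataFrame(orders_pd)
cleaned_df.write.mode("overwrite").parquet("hdfs://namenode:9000/takeaway/cleaned_orders/")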
Food Delivery Analysis and Visualization System Demo Video
Food Delivery Analysis and Visualization System Demo Screenshots
Food Delivery Analysis and Visualization System Code Showcase
from pyspark.sql import SparkSession
# Import only the Spark SQL functions used below, so Python's builtin round() is not shadowed
from pyspark.sql.functions import (
    col, unix_timestamp, avg, count, sum, max, min, hour,
    when, to_date, countDistinct, date_format,
)
from pyspark.sql.types import *
import pandas as pd
import numpy as np
from datetime import datetime, timedelta
from django.http import JsonResponse
from django.views.decorators.csrf import csrf_exempt
import json
spark = SparkSession.builder.appName("外卖配送数据分析").config("spark.sql.adaptive.enabled", "true").config("spark.sql.adaptive.coalescePartitions.enabled", "true").getOrCreate()
@csrf_exempt
def analyze_delivery_performance(request):
    """Order delivery analysis: core endpoint."""
    if request.method == 'POST':
        data = json.loads(request.body)
        start_date = data.get('start_date')
        end_date = data.get('end_date')
        region_filter = data.get('region', None)
        # Load order and delivery records from MySQL via JDBC
        orders_df = spark.read.format("jdbc").option("url", "jdbc:mysql://localhost:3306/takeaway").option("dbtable", "orders").option("user", "root").option("password", "password").load()
        delivery_df = spark.read.format("jdbc").option("url", "jdbc:mysql://localhost:3306/takeaway").option("dbtable", "delivery_records").option("user", "root").option("password", "password").load()
        # Join on the column name so order_id is not duplicated and later references stay unambiguous
        joined_df = orders_df.join(delivery_df, "order_id", "inner")
        filtered_df = joined_df.filter((col("order_time") >= start_date) & (col("order_time") <= end_date))
        if region_filter:
            filtered_df = filtered_df.filter(col("delivery_region") == region_filter)
        # Delivery duration in minutes, from order placement to delivery
        filtered_df = filtered_df.withColumn("delivery_duration", (unix_timestamp("delivered_time") - unix_timestamp("order_time")) / 60)
        avg_duration = filtered_df.agg(avg("delivery_duration").alias("avg_duration")).collect()[0]["avg_duration"]
        delivery_stats = filtered_df.groupBy("delivery_region").agg(count("order_id").alias("order_count"), avg("delivery_duration").alias("avg_duration"), max("delivery_duration").alias("max_duration"), min("delivery_duration").alias("min_duration"))
        hourly_stats = filtered_df.withColumn("hour", hour("order_time")).groupBy("hour").agg(count("order_id").alias("hourly_orders"), avg("delivery_duration").alias("hourly_avg_duration")).orderBy("hour")
        # Per-courier efficiency: order volume, average duration, total distance
        efficiency_df = filtered_df.groupBy("delivery_person_id").agg(count("order_id").alias("total_orders"), avg("delivery_duration").alias("personal_avg_duration"), sum("delivery_distance").alias("total_distance"))
        top_performers = efficiency_df.filter(col("total_orders") >= 10).orderBy(col("personal_avg_duration").asc()).limit(10)
        delivery_stats_list = [row.asDict() for row in delivery_stats.collect()]
        hourly_stats_list = [row.asDict() for row in hourly_stats.collect()]
        top_performers_list = [row.asDict() for row in top_performers.collect()]
        result = {"average_delivery_time": round(avg_duration, 2) if avg_duration is not None else 0, "regional_statistics": delivery_stats_list, "hourly_distribution": hourly_stats_list, "top_delivery_personnel": top_performers_list, "total_analyzed_orders": filtered_df.count()}
        return JsonResponse(result)
    return JsonResponse({"error": "POST required"}, status=405)
@csrf_exempt
def analyze_food_popularity(request):
    """Food popularity analysis: core endpoint."""
    if request.method == 'POST':
        data = json.loads(request.body)
        analysis_period = data.get('period', 30)
        category_filter = data.get('category', None)
        orders_df = spark.read.format("jdbc").option("url", "jdbc:mysql://localhost:3306/takeaway").option("dbtable", "orders").option("user", "root").option("password", "password").load()
        food_df = spark.read.format("jdbc").option("url", "jdbc:mysql://localhost:3306/takeaway").option("dbtable", "food_items").option("user", "root").option("password", "password").load()
        order_items_df = spark.read.format("jdbc").option("url", "jdbc:mysql://localhost:3306/takeaway").option("dbtable", "order_items").option("user", "root").option("password", "password").load()
        # Only keep orders placed within the analysis window
        cutoff_date = (datetime.now() - timedelta(days=analysis_period)).strftime('%Y-%m-%d')
        recent_orders = orders_df.filter(col("order_time") >= cutoff_date)
        food_orders = recent_orders.join(order_items_df, "order_id").join(food_df, "food_id")
        if category_filter:
            food_orders = food_orders.filter(col("category") == category_filter)
        # Sales volume, order frequency and revenue per dish
        popularity_stats = food_orders.groupBy("food_id", "food_name", "category", "price").agg(sum("quantity").alias("total_sold"), count("order_id").alias("order_frequency"), sum(col("quantity") * col("price")).alias("total_revenue"))
        top_selling_foods = popularity_stats.orderBy(col("total_sold").desc()).limit(20)
        revenue_leaders = popularity_stats.orderBy(col("total_revenue").desc()).limit(15)
        category_performance = food_orders.groupBy("category").agg(sum("quantity").alias("category_volume"), countDistinct("food_id").alias("variety_count"), avg("price").alias("avg_price"))
        # Bucket dishes into low / medium / high price bands
        price_range_analysis = food_orders.withColumn("price_range", when(col("price") < 20, "低价位").when(col("price") < 50, "中价位").otherwise("高价位")).groupBy("price_range").agg(sum("quantity").alias("range_sales"), avg("price").alias("range_avg_price"))
        temporal_trends = food_orders.withColumn("order_date", to_date("order_time")).groupBy("order_date").agg(sum("quantity").alias("daily_sales"), countDistinct("food_id").alias("daily_variety")).orderBy("order_date")
        # Price-versus-sales correlation computed in Pandas on the aggregated result
        correlation_matrix = food_orders.groupBy("food_id").agg(avg("price").alias("avg_price"), sum("quantity").alias("total_quantity")).toPandas()
        price_sales_corr = correlation_matrix['avg_price'].corr(correlation_matrix['total_quantity']) if len(correlation_matrix) > 1 else 0
        top_selling_list = [row.asDict() for row in top_selling_foods.collect()]
        revenue_leaders_list = [row.asDict() for row in revenue_leaders.collect()]
        category_stats = [row.asDict() for row in category_performance.collect()]
        price_analysis = [row.asDict() for row in price_range_analysis.collect()]
        trends_data = [row.asDict() for row in temporal_trends.collect()]
        result = {"top_selling_foods": top_selling_list, "revenue_leaders": revenue_leaders_list, "category_performance": category_stats, "price_range_analysis": price_analysis, "sales_trends": trends_data, "price_sales_correlation": round(price_sales_corr, 3), "analysis_period_days": analysis_period}
        return JsonResponse(result)
    return JsonResponse({"error": "POST required"}, status=405)
@csrf_exempt
def generate_order_management_report(request):
    """Order management statistics: core endpoint."""
    if request.method == 'POST':
        data = json.loads(request.body)
        report_type = data.get('report_type', 'daily')
        date_range = data.get('date_range', 7)
        orders_df = spark.read.format("jdbc").option("url", "jdbc:mysql://localhost:3306/takeaway").option("dbtable", "orders").option("user", "root").option("password", "password").load()
        users_df = spark.read.format("jdbc").option("url", "jdbc:mysql://localhost:3306/takeaway").option("dbtable", "users").option("user", "root").option("password", "password").load()
        restaurants_df = spark.read.format("jdbc").option("url", "jdbc:mysql://localhost:3306/takeaway").option("dbtable", "restaurants").option("user", "root").option("password", "password").load()
        start_date = (datetime.now() - timedelta(days=date_range)).strftime('%Y-%m-%d')
        filtered_orders = orders_df.filter(col("order_time") >= start_date)
        # Enrich orders with user and restaurant attributes
        enhanced_orders = filtered_orders.join(users_df, "user_id", "left").join(restaurants_df, "restaurant_id", "left")
        order_status_stats = enhanced_orders.groupBy("order_status").agg(count("order_id").alias("status_count"), sum("total_amount").alias("status_revenue"))
        # Pick the date_format pattern that matches the requested report granularity
        if report_type == 'daily':
            group_format = "yyyy-MM-dd"
        elif report_type == 'hourly':
            group_format = "yyyy-MM-dd HH"
        else:
            group_format = "yyyy-MM"
        temporal_stats = enhanced_orders.withColumn("time_period", date_format("order_time", group_format)).groupBy("time_period").agg(count("order_id").alias("period_orders"), sum("total_amount").alias("period_revenue"), avg("total_amount").alias("avg_order_value"), countDistinct("user_id").alias("unique_customers"))
        restaurant_performance = enhanced_orders.groupBy("restaurant_id", "restaurant_name").agg(count("order_id").alias("restaurant_orders"), sum("total_amount").alias("restaurant_revenue"), avg("total_amount").alias("avg_order_size"), countDistinct("user_id").alias("customer_base"))
        top_restaurants = restaurant_performance.orderBy(col("restaurant_revenue").desc()).limit(10)
        customer_analysis = enhanced_orders.groupBy("user_id").agg(count("order_id").alias("user_orders"), sum("total_amount").alias("user_spending"), avg("total_amount").alias("avg_spending_per_order"))
        # Segment customers by order frequency
        customer_segments = customer_analysis.withColumn("customer_type", when(col("user_orders") >= 20, "高频用户").when(col("user_orders") >= 5, "中频用户").otherwise("低频用户")).groupBy("customer_type").agg(count("user_id").alias("segment_size"), avg("user_spending").alias("avg_segment_spending"))
        payment_method_stats = enhanced_orders.groupBy("payment_method").agg(count("order_id").alias("payment_count"), sum("total_amount").alias("payment_revenue"))
        cancellation_analysis = enhanced_orders.filter(col("order_status") == "已取消").groupBy("cancellation_reason").agg(count("order_id").alias("cancellation_count"))
        overall_metrics = enhanced_orders.agg(count("order_id").alias("total_orders"), sum("total_amount").alias("total_revenue"), avg("total_amount").alias("average_order_value"), countDistinct("user_id").alias("active_customers"), countDistinct("restaurant_id").alias("active_restaurants")).collect()[0]
        result = {"overall_metrics": overall_metrics.asDict(), "order_status_distribution": [row.asDict() for row in order_status_stats.collect()], "temporal_analysis": [row.asDict() for row in temporal_stats.orderBy("time_period").collect()], "top_performing_restaurants": [row.asDict() for row in top_restaurants.collect()], "customer_segmentation": [row.asDict() for row in customer_segments.collect()], "payment_preferences": [row.asDict() for row in payment_method_stats.collect()], "cancellation_reasons": [row.asDict() for row in cancellation_analysis.collect()], "report_period_days": date_range, "report_type": report_type}
        return JsonResponse(result)
    return JsonResponse({"error": "POST required"}, status=405)
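For context, here is a minimal sketch of how these three views might be exposed through Django URL routing; the module path and route names are assumptions rather than the project's actual configuration.
# urls.py -- hypothetical routing for the analysis endpoints above
from django.urls import path
from . import views

urlpatterns = [
    path('api/delivery/performance/', views.analyze_delivery_performance),
    path('api/food/popularity/', views.analyze_food_popularity),
    path('api/orders/report/', views.generate_order_management_report),
]
The Vue + Echarts front end would then POST a JSON body with the filter parameters (start_date, end_date, period, and so on) to these endpoints and feed the returned statistics into its chart options.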
Food Delivery Analysis and Visualization System Documentation Showcase