
GM, BP, and LSTM Time-Series Forecasting Code (MATLAB)

GM(1,1)

clc; clear; close all;

%% Load and preprocess the data
[file, path] = uigetfile('*.xlsx', 'Select the Excel file');
filename = fullfile(path, file);
time_series = xlsread(filename);

% Ensure the data is a single column
time_series = time_series(:);

% Normalize the data to [0, 1]
min_val = min(time_series);
max_val = max(time_series);
normalized_data = (time_series - min_val) / (max_val - min_val);

% Training/test split
train_ratio = 0.8;
n = length(normalized_data);
M = floor(n * train_ratio);
train_data = normalized_data(1:M);
test_data = normalized_data(M+1:end);
%% Build the GM(1,1) model and predict
% 1. Original sequence
X0 = train_data(:); % ensure a column vector

% 2. Accumulated generating operation (AGO)
X1 = cumsum(X0);

% 3. Build the data matrix and estimate the parameters by least squares
B = [-0.5 * (X1(1:end-1) + X1(2:end)), ones(M-1, 1)];
Y = X0(2:end);
U = B \ Y;    % least-squares solution [a; b]
a = U(1);     % development coefficient a
b = U(2);     % grey input b

% 4. Time-response function: fitted values of the accumulated series
X1_pred = zeros(M, 1);
X1_pred(1) = X0(1); % initial value
for k = 2:M
    X1_pred(k) = (X0(1) - b/a) * exp(-a * (k-1)) + b/a;
end

% Restore the original (non-accumulated) series by differencing
X0_pred = [X1_pred(1); diff(X1_pred)];

% Predict the test set: extend the accumulated prediction beyond the
% training samples, then difference it back to the original series
N = length(test_data);
X1_ext = zeros(M + N, 1);
X1_ext(1) = X0(1);
for k = 2:(M + N)
    X1_ext(k) = (X0(1) - b/a) * exp(-a * (k-1)) + b/a;
end
X_test_pred = diff(X1_ext(M:M+N)); % forecasts for samples M+1 .. M+N
%% Denormalize the data
T_train = train_data * (max_val - min_val) + min_val;        % true training values
T_test = test_data * (max_val - min_val) + min_val;          % true test values
T_train_pred = X0_pred * (max_val - min_val) + min_val;      % training predictions
T_test_pred = X_test_pred * (max_val - min_val) + min_val;   % test predictions

%% Performance evaluation
% R² (coefficient of determination)
R1 = 1 - norm(T_train - T_train_pred)^2 / norm(T_train - mean(T_train))^2;
R2 = 1 - norm(T_test - T_test_pred)^2 / norm(T_test - mean(T_test))^2;

% MAE (mean absolute error)
mae1 = mean(abs(T_train - T_train_pred));
mae2 = mean(abs(T_test - T_test_pred));

% MAPE (mean absolute percentage error)
mape1 = mean(abs((T_train - T_train_pred) ./ T_train));
mape2 = mean(abs((T_test - T_test_pred) ./ T_test));

% MBE (mean bias error)
mbe1 = mean(T_train - T_train_pred);  % training MBE
mbe2 = mean(T_test - T_test_pred);    % test MBE

% MSE (mean squared error)
mse1 = mean((T_train - T_train_pred).^2);  % training MSE
mse2 = mean((T_test - T_test_pred).^2);    % test MSE

% Display the metrics
disp(['Training set R²: ', num2str(R1)]);
disp(['Test set R²: ', num2str(R2)]);
disp(['Training set MAE: ', num2str(mae1)]);
disp(['Test set MAE: ', num2str(mae2)]);
disp(['Training set MAPE: ', num2str(mape1)]);
disp(['Test set MAPE: ', num2str(mape2)]);
disp(['Training set MBE: ', num2str(mbe1)]);
disp(['Test set MBE: ', num2str(mbe2)]);
disp(['Training set MSE: ', num2str(mse1)]);
disp(['Test set MSE: ', num2str(mse2)]);
%% Visualization
% Training set prediction
figure;
plot(1:M, T_train, '-', 'LineWidth', 2, 'Color', [0 0 1]);        % true values (blue)
hold on;
plot(1:M, T_train_pred, '--', 'LineWidth', 2, 'Color', [1 0 0]);  % predicted values (red, dashed)
legend('True', 'Predicted', 'Location', 'best');
xlabel('Samples');
ylabel('Values');
title('Training Set Prediction');
grid on;

% Test set prediction
figure;
plot(1:N, T_test, '-', 'LineWidth', 2, 'Color', [0 0 1]);         % true values (blue)
hold on;
plot(1:N, T_test_pred, '--', 'LineWidth', 2, 'Color', [1 0 0]);   % predicted values (red, dashed)
legend('True', 'Predicted', 'Location', 'best');
xlabel('Samples');
ylabel('Values');
title('Test Set Prediction');
grid on;

disp('GM(1,1) prediction complete!');
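For reference, the script follows the standard GM(1,1) formulation: accumulate the series (AGO), estimate the parameters a and b by least squares from the grey differential equation, evaluate the time-response function, and difference the result back to the original scale:

\[ x^{(1)}(k) = \sum_{i=1}^{k} x^{(0)}(i), \qquad x^{(0)}(k) + a\,z^{(1)}(k) = b, \qquad z^{(1)}(k) = \tfrac{1}{2}\big(x^{(1)}(k) + x^{(1)}(k-1)\big) \]

\[ [\hat a,\ \hat b]^{\mathsf T} = (B^{\mathsf T}B)^{-1}B^{\mathsf T}Y, \qquad \hat x^{(1)}(k+1) = \Big(x^{(0)}(1) - \tfrac{b}{a}\Big)e^{-ak} + \tfrac{b}{a}, \qquad \hat x^{(0)}(k+1) = \hat x^{(1)}(k+1) - \hat x^{(1)}(k) \]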

BP

clc; clear; close all;

% Load and preprocess data from Excel
[file, path] = uigetfile('*.xlsx', 'Select the Excel file');
filename = fullfile(path, file);
time_series = xlsread(filename);

% Normalize data
min_val = min(time_series);
max_val = max(time_series);
normalized_data = (time_series - min_val) / (max_val - min_val);

% Training/Test split
train_ratio = 0.8;
n = length(normalized_data);
M = floor(n * train_ratio);
train_data = normalized_data(1:M);
test_data = normalized_data(M+1:end);

% Prepare data for the BP network (input x(t), target x(t+1))
P_train = train_data(1:end-1)';
T_train = train_data(2:end)';
P_test = test_data(1:end-1)';
T_test = test_data(2:end)';

% Create and train BP neural network
net = feedforwardnet(10, 'trainlm'); % 10 hidden neurons, Levenberg-Marquardt training
net.trainParam.epochs = 1000;
net.trainParam.goal = 1e-5;

% Train the network
net = train(net, P_train, T_train);

% Prediction
T_sim1 = net(P_train); % Training set prediction
T_sim2 = net(P_test);  % Test set prediction

% Denormalize predictions and targets
T_sim1 = T_sim1 * (max_val - min_val) + min_val;
T_sim2 = T_sim2 * (max_val - min_val) + min_val;
T_train = T_train * (max_val - min_val) + min_val;
T_test = T_test * (max_val - min_val) + min_val;

% Ensure dimensions match
if size(T_sim1, 1) ~= size(T_train, 1)
    T_sim1 = T_sim1'; % match the orientation of T_train
end
if size(T_sim2, 1) ~= size(T_test, 1)
    T_sim2 = T_sim2'; % match the orientation of T_test
end

%% Compute Metrics
% R-squared (R²)
R2_train = 1 - sum((T_train - T_sim1).^2) / sum((T_train - mean(T_train)).^2);
R2_test = 1 - sum((T_test - T_sim2).^2) / sum((T_test - mean(T_test)).^2);

% Mean Absolute Error (MAE)
MAE_train = sum(abs(T_train - T_sim1)) / length(T_train);
MAE_test = sum(abs(T_test - T_sim2)) / length(T_test);

% Mean Absolute Percentage Error (MAPE)
MAPE_train = mean(abs((T_train - T_sim1) ./ T_train)) * 100;
MAPE_test = mean(abs((T_test - T_sim2) ./ T_test)) * 100;

% Mean Bias Error (MBE)
MBE_train = sum(T_sim1 - T_train) / length(T_train);
MBE_test = sum(T_sim2 - T_test) / length(T_test);

% Mean Squared Error (MSE)
MSE_train = sum((T_train - T_sim1).^2) / length(T_train);
MSE_test = sum((T_test - T_sim2).^2) / length(T_test);

% Display Metrics
disp(['Training Set R²: ', num2str(R2_train)]);
disp(['Test Set R²: ', num2str(R2_test)]);
disp(['Training Set MAE: ', num2str(MAE_train)]);
disp(['Test Set MAE: ', num2str(MAE_test)]);
disp(['Training Set MAPE: ', num2str(MAPE_train), '%']);
disp(['Test Set MAPE: ', num2str(MAPE_test), '%']);
disp(['Training Set MBE: ', num2str(MBE_train)]);
disp(['Test Set MBE: ', num2str(MBE_test)]);
disp(['Training Set MSE: ', num2str(MSE_train)]);
disp(['Test Set MSE: ', num2str(MSE_test)]);

%% Generate Plots
% Training Set Prediction Plot
figure;
plot(1:length(T_train), T_train, '-', 'LineWidth', 2, 'Color', [0 0 1]);  % True values (blue line)
hold on;
plot(1:length(T_train), T_sim1, '--', 'LineWidth', 2, 'Color', [1 0 0]);  % Predicted values (red dashed line)
legend('True', 'Predicted', 'Location', 'best');
xlabel('Samples');
ylabel('Values');
title('Training Set Prediction');
grid on;

% Test Set Prediction Plot
figure;
plot(1:length(T_test), T_test, '-', 'LineWidth', 2, 'Color', [0 0 1]);    % True values (blue line)
hold on;
plot(1:length(T_test), T_sim2, '--', 'LineWidth', 2, 'Color', [1 0 0]);   % Predicted values (red dashed line)
legend('True', 'Predicted', 'Location', 'best');
xlabel('Samples');
ylabel('Values');
title('Test Set Prediction');
grid on;

disp('Model evaluation completed!');
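The BP script above only evaluates one-step-ahead predictions on inputs that are already known. If a forecast beyond the end of the series is wanted, the trained network can be rolled forward recursively, feeding each prediction back in as the next input. A minimal sketch, assuming the workspace left by the script (net, normalized_data, min_val, max_val); the horizon H is an illustrative choice:

% Recursive multi-step forecast with the trained BP network (sketch)
H = 10;                               % forecast horizon (illustrative)
future = zeros(1, H);                 % forecasts in normalized units
x = normalized_data(end);             % seed with the last observed (normalized) value
for h = 1:H
    x = net(x);                       % one-step-ahead prediction
    future(h) = x;                    % the prediction becomes the next input
end
future = future * (max_val - min_val) + min_val;  % back to original units
disp(future);

Because each step consumes its own prediction, errors accumulate with the horizon, so H should stay short relative to the training length.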

LSTM

clc; clear; close all;

% Load and preprocess data from Excel
[file, path] = uigetfile('*.xlsx', 'Select the Excel file');
filename = fullfile(path, file);
time_series = xlsread(filename);

% Normalize data
min_val = min(time_series);
max_val = max(time_series);
normalized_data = (time_series - min_val) / (max_val - min_val);

% Training/Test split
train_ratio = 0.8;
n = length(normalized_data);
M = floor(n * train_ratio);
train_data = normalized_data(1:M);
test_data = normalized_data(M+1:end);

% Prepare data for LSTM network (input x(t), target x(t+1))
X_train = train_data(1:end-1); % Input data for training
Y_train = train_data(2:end);   % Target data for training
X_test = test_data(1:end-1);   % Input data for testing
Y_test = test_data(2:end);     % Target data for testing
% Reshape each set into a single sequence of size [numFeatures x numTimeSteps]
X_train = reshape(X_train, 1, []);
Y_train = reshape(Y_train, 1, []);
X_test = reshape(X_test, 1, []);
Y_test = reshape(Y_test, 1, []);

% Define LSTM network architecture
inputSize = 1;        % one feature (the time-series value)
numHiddenUnits = 50;  % number of hidden units in the LSTM layer
outputSize = 1;       % output size

layers = [
    sequenceInputLayer(inputSize)                        % Input layer
    lstmLayer(numHiddenUnits, 'OutputMode', 'sequence')  % LSTM layer (sequence-to-sequence)
    fullyConnectedLayer(outputSize)                      % Fully connected layer
    regressionLayer                                      % Regression output layer
];
% Training options
options = trainingOptions('adam', ...
    'MaxEpochs', 300, ...
    'GradientThreshold', 1, ...
    'InitialLearnRate', 0.01, ...
    'LearnRateSchedule', 'piecewise', ...
    'LearnRateDropFactor', 0.2, ...
    'LearnRateDropPeriod', 150, ...
    'Verbose', 0, ...
    'Plots', 'training-progress');

% Train the LSTM network
net = trainNetwork(X_train, Y_train, layers, options);

% Predict on training and test sets
Y_pred_train = predict(net, X_train, 'MiniBatchSize', 1);
Y_pred_test = predict(net, X_test, 'MiniBatchSize', 1);

% Reshape predictions back to 1-D vectors
Y_pred_train = squeeze(Y_pred_train);
Y_pred_test = squeeze(Y_pred_test);

% Denormalize predictions and targets
Y_pred_train = Y_pred_train * (max_val - min_val) + min_val;
Y_pred_test = Y_pred_test * (max_val - min_val) + min_val;
Y_train = squeeze(Y_train) * (max_val - min_val) + min_val;
Y_test = squeeze(Y_test) * (max_val - min_val) + min_val;
%% Compute Metrics
% R-squared (R²)
R2_train = 1 - sum((Y_train - Y_pred_train).^2) / sum((Y_train - mean(Y_train)).^2);
R2_test = 1 - sum((Y_test - Y_pred_test).^2) / sum((Y_test - mean(Y_test)).^2);

% Mean Absolute Error (MAE)
MAE_train = mean(abs(Y_train - Y_pred_train));
MAE_test = mean(abs(Y_test - Y_pred_test));

% Mean Absolute Percentage Error (MAPE)
MAPE_train = mean(abs((Y_train - Y_pred_train) ./ Y_train)) * 100;
MAPE_test = mean(abs((Y_test - Y_pred_test) ./ Y_test)) * 100;

% Mean Bias Error (MBE)
MBE_train = mean(Y_pred_train - Y_train);
MBE_test = mean(Y_pred_test - Y_test);

% Mean Squared Error (MSE)
MSE_train = mean((Y_train - Y_pred_train).^2);
MSE_test = mean((Y_test - Y_pred_test).^2);

% Display Metrics
disp(['Training Set R²: ', num2str(R2_train)]);
disp(['Test Set R²: ', num2str(R2_test)]);
disp(['Training Set MAE: ', num2str(MAE_train)]);
disp(['Test Set MAE: ', num2str(MAE_test)]);
disp(['Training Set MAPE: ', num2str(MAPE_train), '%']);
disp(['Test Set MAPE: ', num2str(MAPE_test), '%']);
disp(['Training Set MBE: ', num2str(MBE_train)]);
disp(['Test Set MBE: ', num2str(MBE_test)]);
disp(['Training Set MSE: ', num2str(MSE_train)]);
disp(['Test Set MSE: ', num2str(MSE_test)]);

%% Generate Plots
% Training Set Prediction Plot
figure;
plot(1:length(Y_train), Y_train, '-', 'LineWidth', 2, 'Color', [0 0 1]);        % True values (blue line)
hold on;
plot(1:length(Y_train), Y_pred_train, '--', 'LineWidth', 2, 'Color', [1 0 0]);  % Predicted values (red dashed line)
legend('True', 'Predicted', 'Location', 'best');
xlabel('Samples');
ylabel('Values');
title('Training Set Prediction');
grid on;

% Test Set Prediction Plot
figure;
plot(1:length(Y_test), Y_test, '-', 'LineWidth', 2, 'Color', [0 0 1]);          % True values (blue line)
hold on;
plot(1:length(Y_test), Y_pred_test, '--', 'LineWidth', 2, 'Color', [1 0 0]);    % Predicted values (red dashed line)
legend('True', 'Predicted', 'Location', 'best');
xlabel('Samples');
ylabel('Values');
title('Test Set Prediction');
grid on;

disp('LSTM model evaluation completed!');
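As with the BP model, the LSTM above is evaluated in open loop: the true value is fed in at every step. To forecast past the end of the data, the Deep Learning Toolbox function predictAndUpdateState can be used in closed loop, warming up the hidden state on the training inputs and feeding each prediction back as the next input. A minimal sketch, assuming the workspace left by the script (net, train_data, min_val, max_val); H is an illustrative horizon:

% Closed-loop multi-step forecast with the trained LSTM (sketch)
H = 10;                                                                  % forecast horizon (illustrative)
net = predictAndUpdateState(net, reshape(train_data(1:end-1), 1, []));   % warm up the state on the training inputs
[net, next] = predictAndUpdateState(net, train_data(end));               % first step from the last observation
future = zeros(1, H);                                                    % forecasts in normalized units
future(1) = next;
for h = 2:H
    [net, next] = predictAndUpdateState(net, future(h-1));               % feed the previous prediction back in
    future(h) = next;
end
future = future * (max_val - min_val) + min_val;                         % back to original units
disp(future);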

 
