Commit faf1ad6

second
Massacre96Wj authored Dec 5, 2020
1 parent c1ba148 commit faf1ad6
Showing 1 changed file with 71 additions and 18 deletions.
89 changes: 71 additions & 18 deletions 集中式/center1.py
@@ -4,19 +4,25 @@
@Date :2020/11/29 10:10
@Desc :
"""
import datetime
import os
import time

import torch
from scipy.interpolate import make_interp_spline
from statsmodels.tsa.seasonal import seasonal_decompose
from torch import nn
import numpy
import pandas as pd
from pandas import read_csv
from sklearn.preprocessing import MinMaxScaler
from functools import reduce
from log import logger
from tensorboardX import SummaryWriter
import matplotlib.pyplot as plt

look_back = 4
EPOCH = 10
- SIZE = 50
+ SIZE = 150
dataset = None
Nums = 0

@@ -54,13 +60,27 @@ def evluation(real, pred):
# return data_X

# Initialize the data
- dataset = numpy.zeros((30, 50), dtype=float)
- dataset[1] = numpy.random.poisson(lam=20, size=SIZE)
- dataset[2] = numpy.random.poisson(lam=10, size=SIZE)
- dataset[4] = numpy.random.poisson(lam=30, size=SIZE)
+ dataset = numpy.zeros((30, SIZE), dtype=float)
+ d1 = numpy.random.poisson(lam=20.0, size=SIZE)
+ dataset[1] = d1
+ dataset[2] = numpy.random.poisson(lam=20.0, size=SIZE)
+ dataset[4] = numpy.random.poisson(lam=20.0, size=SIZE)
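
# The rows of `dataset` are per-node time-series slots (up to 30 nodes); only
# rows 1, 2 and 4 are filled here with synthetic Poisson(lam=20) load counts.
# A quick sanity check of the shapes set up above (assumes SIZE = 150):
#   >>> dataset.shape
#   (30, 150)
#   >>> abs(dataset[1].mean() - 20.0) < 2.0   # sample mean of Poisson ~ lam
#   True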

# Returns three components: trend, seasonal, and residual
def decompose(timeseries):
decomposition = seasonal_decompose(timeseries, freq=3)
trend = decomposition.trend
seasonal = decomposition.seasonal
residual = decomposition.resid
return trend, seasonal, residual
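
# decompose() wraps statsmodels' additive seasonal_decompose, so the three
# parts sum back to the input wherever the centered moving average is defined
# (freq=3 leaves one NaN at each end). Note: newer statsmodels releases
# renamed freq= to period=. A minimal sketch on a hypothetical toy series:
#   >>> s = pd.Series(numpy.arange(6, dtype=float))
#   >>> t, seas, r = decompose(s)
#   >>> numpy.allclose((t + seas + r).dropna(), s[1:-1])
#   True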

def get_data(node_id, time_series):
time_series = numpy.array(time_series)
dataset[node_id - 1] = time_series
time_series = pd.Series(numpy.log(time_series + 1).reshape(-1), index=pd.DatetimeIndex(range(len(time_series))))
trend, seasonal, residual = decompose(time_series)
ts = trend + seasonal
ts = ts.fillna(numpy.nanmean(numpy.array(ts)))
dataset[node_id - 1] = ts.values
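
# get_data() stores a denoised copy of one node's series: log(x + 1) shrinks
# the Poisson spikes, keeping trend + seasonal drops the residual noise, and
# the NaN edges left by the moving average are filled with the series mean.
# Hypothetical usage sketch (node ids are 1-based, dataset rows are 0-based):
#   >>> get_data(3, numpy.random.poisson(lam=20.0, size=SIZE))
#   >>> dataset[2][:5]   # row 2 now holds node 3's smoothed log-series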

# Determine which nodes are participating in training
def join_train(dataset=dataset):
@@ -99,7 +119,9 @@ def forward(self, x):
x = x.view(s, b, -1)
return x
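
# Shape note for forward() (the earlier lines of the method are collapsed in
# this diff): with PyTorch's default batch_first=False the input is
# (seq_len, batch, Nums*look_back); after the LSTM and linear head the output
# is reshaped back to (seq_len, batch, output_size) so it lines up with var_y.
# Hypothetical check, assuming an LSTM(input_size, hidden_size, output_size,
# num_layers) constructor as used in train() below:
#   >>> m = LSTM(12, 64, 3, 1)
#   >>> m(torch.zeros(10, 1, 12)).shape
#   torch.Size([10, 1, 3])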


class TrainPredict(object):

def __init__(self):
self.var_x, self.var_y, self.model = None, None, None
self.Node = []
@@ -112,7 +134,7 @@ def transfrom(self):
data_X, data_Y = load_data1(data.reshape(-1, 1))
dfs_x.append(data_X)
dfs_y.append(data_Y.reshape(-1, 1))
-         dataset = dataset = numpy.zeros((30, 50), dtype=float)
+         dataset = numpy.zeros((30, SIZE), dtype=float)
data_X = reduce(lambda left, right: numpy.concatenate((left, right), axis=1), dfs_x)
data_Y = reduce(lambda left, right: numpy.concatenate((left, right), axis=1), dfs_y)
train_X = data_X
@@ -121,27 +143,52 @@ def transfrom(self):
train_Y = train_Y.reshape(-1, 1, train_Y.shape[1])
self.var_x = torch.from_numpy(train_X)
self.var_y = torch.from_numpy(train_Y)
-         self.model = LSTM(train_X.shape[-1], 64, Nums, 1)
+         return train_X.shape[-1]
# print(self.model)
-         print(dataset)
+         # print(dataset)
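
    # transfrom() lines the participating nodes up column-wise: each node
    # contributes look_back lagged features per sample, so one input row is
    # (1, Nums*look_back) wide and one target row is (1, Nums) wide.
    # Shape sketch with 3 nodes, look_back = 4 and SIZE = 150 (an assumption
    # about the hidden load_data1 windowing):
    #   data_X: (samples, 12)  ->  var_x: (samples, 1, 12)
    #   data_Y: (samples, 3)   ->  var_y: (samples, 1, 3)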

def train(self):
-         self.transfrom()
+         input_shape = self.transfrom()

self.model = LSTM(input_shape, Nums*look_back*4, Nums, 1)
loss_fun = nn.MSELoss()
optimizer = torch.optim.Adam(self.model.parameters(), lr=0.05)

writer = SummaryWriter("logs")
writer.add_graph(self.model, input_to_model=torch.zeros(1, 1, Nums*look_back), verbose=True)
loss_plot = [1]
time_plot = [0]
first = 0
for epoch in range(1, EPOCH+1):
logger.info("Epoch: %d......" % epoch)
-             for t in range(50):
+             if not first:
+                 first = float(time.time())
+                 writer.add_scalar("train loss", 1.0, 0)
+             for t in range(25):
                # Forward pass
out = self.model(self.var_x)
loss = loss_fun(out, self.var_y)
                # Backpropagation
optimizer.zero_grad()
loss.backward()
optimizer.step()
# tensorboard --logdir=logs
writer.add_scalar("train loss", loss.item(), float(time.time()*1000) - first*1000)
time_plot.append(float(time.time()) - first)
loss_plot.append(loss.item())
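
                # The third argument of add_scalar is the global step; elapsed
                # wall-clock milliseconds are used here so the TensorBoard
                # x-axis reflects training time rather than iteration count
                # (tensorboardX expects an integer step, so the float value
                # relies on implicit conversion).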

x_smooth = numpy.linspace(min(time_plot), max(time_plot), 300)
y_smooth = make_interp_spline(numpy.array(time_plot), numpy.array(loss_plot))(x_smooth)
plt.plot(x_smooth, y_smooth)
plt.savefig('plot.jpg')
plt.show()
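
        # make_interp_spline fits a cubic B-spline by default (k=3), which
        # needs at least 4 strictly increasing x values; time_plot satisfies
        # both after the first few iterations. Standalone sketch of the
        # smoothing step:
        #   >>> xs = numpy.array([0.0, 1.0, 2.0, 3.0, 4.0])
        #   >>> ys = numpy.array([1.0, 0.5, 0.3, 0.25, 0.2])
        #   >>> make_interp_spline(xs, ys)(numpy.linspace(0.0, 4.0, 300)).shape
        #   (300,)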

return time_plot, loss_plot


    # Predict results; the argument gives how many predicted values are needed
-     def center_predict(self, node_id, time=50):
+     def center_predict(self, node_id, time=150):
torch.save(self.model, "model.pkl")
# self.train()
        # Returns a 2D array; index (node number - 1) holds that node's predictions
predicts = []
@@ -150,8 +197,10 @@ def center_predict(self, node_id, time=50):
        # time is the number of results required
for _ in range(time):
predict = self.model(cur_test_x)
-             predicts.append(predict.data.numpy().reshape(-1).tolist())
# for j in range(len(predict[0][0])):
# predict[0][0][j] = cur_test_x[0][0][j: j + look_back].mean() * 0.8 + predict[0][0][j] * 0.2

+             predicts.append(predict.data.numpy().reshape(-1).tolist())
            # Update the features for the next time step
cur = []
for i in range(1, Nums*look_back+1):
@@ -160,14 +209,18 @@
else:
cur.append(predict[0][0][i//look_back-1].data)
cur_test_x = torch.from_numpy(numpy.array(cur)).view(-1, 1, look_back*Nums)
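
            # Rolling-window update: each node owns a look_back-wide slice of
            # the feature vector; everything shifts left one slot and the slot
            # at each window boundary takes that node's fresh prediction, so
            # the model is fed its own output autoregressively, e.g. with
            # look_back = 4 a node's window [x1 x2 x3 x4] becomes [x2 x3 x4 p].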

predicts = scaler.inverse_transform(numpy.array(predicts).reshape(Nums, -1))
print(predicts.shape)

result = numpy.zeros((30, time))
for i, node in enumerate(self.Node):
result[node] = predicts[i]
return result[node_id]

tp = TrainPredict()
- tp.train()
- result = tp.center_predict(1, 50)
- print(result)
+ time_plot, loss_plot = tp.train()

+ result = tp.center_predict(1, 150)
+ r = numpy.concatenate((d1, result), axis=0)
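
# r stitches the raw synthetic series d1 (150 observed points) to the 150
# values predicted for node id 1, e.g. for eyeballing the forecast boundary:
#   >>> plt.plot(range(len(r)), r)
#   >>> plt.axvline(x=SIZE, linestyle="--")   # observed | predicted split
#   >>> plt.show()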

