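"""Train a small feed-forward network to predict daily weather for Basel.

The input CSV must contain a 'datetime' column in DD/MM/YY format together
with the target columns 'temp', 'precip', 'snow' and 'windspeed'. Dates are
expanded into cyclical day/month features plus the raw year, and a single
multi-output model maps those five inputs to the four targets. Trained
weights are saved to 'weather_predictor.pth' and reloaded at prediction time.
"""
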
import torch
import torch.nn as nn
import torch.optim as optim
import pandas as pd
import numpy as np
from sklearn.preprocessing import MinMaxScaler
from datetime import datetime


class WeatherPredictor:
    def __init__(self, data_path):
        # Load and preprocess data
        # Parse 'datetime' as DD/MM/YY; 'date_format' requires pandas >= 2.0
        self.df = pd.read_csv(data_path, parse_dates=['datetime'],
                              date_format='%d/%m/%y')
        self.df['day'] = self.df['datetime'].dt.day
        self.df['month'] = self.df['datetime'].dt.month
        self.df['year'] = self.df['datetime'].dt.year

        # Encode day and month as sine/cosine pairs so the features wrap smoothly
        # (day 31 sits next to day 1, December next to January)
        self.df['day_sin'] = np.sin(2 * np.pi * self.df['day'] / 31)
        self.df['day_cos'] = np.cos(2 * np.pi * self.df['day'] / 31)
        self.df['month_sin'] = np.sin(2 * np.pi * self.df['month'] / 12)
        self.df['month_cos'] = np.cos(2 * np.pi * self.df['month'] / 12)

        features = ['day_sin', 'day_cos', 'month_sin', 'month_cos', 'year']
        target_columns = ['temp', 'precip', 'snow', 'windspeed']

        # Scale features and targets
        self.feature_scaler = MinMaxScaler()
        self.target_scaler = MinMaxScaler()

        X = self.feature_scaler.fit_transform(self.df[features])
        Y = self.target_scaler.fit_transform(self.df[target_columns])

        self.X_tensor = torch.FloatTensor(X)
        self.Y_tensor = torch.FloatTensor(Y)

        # Single model for all targets
        input_dim = len(features)
        self.model = nn.Sequential(
            nn.Linear(input_dim, 16),
            nn.ReLU(),
            nn.Linear(16, 8),
            nn.ReLU(),
            nn.Linear(8, 4)  # one output per target: temp, precip, snow, windspeed
        )

    def train(self, epochs=1000):
        # Define loss function and optimizer
        criterion = nn.MSELoss()
        optimizer = optim.Adam(self.model.parameters(), lr=0.01)

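        # Full-batch training: every row is pushed through the network each epoch.
        # This keeps the loop simple for a small table of daily records; a
        # DataLoader with mini-batches would be the usual choice for larger data.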
        for epoch in range(epochs):
            # Forward pass
            outputs = self.model(self.X_tensor)  # Multi-output predictions
            loss = criterion(outputs, self.Y_tensor)

            # Backward pass and optimize
            optimizer.zero_grad()
            loss.backward()
            optimizer.step()

            if epoch % 100 == 0:
                print(f'Epoch [{epoch}/{epochs}], Loss: {loss.item():.4f}')

        # Save the model after training
        self.save_model('weather_predictor.pth')

    def predict(self, input_date):
        # Convert input date to features
        date = datetime.strptime(input_date, '%d/%m/%y')
        features = [
            np.sin(2 * np.pi * date.day / 31),
            np.cos(2 * np.pi * date.day / 31),
            np.sin(2 * np.pi * date.month / 12),
            np.cos(2 * np.pi * date.month / 12),
            date.year
        ]

        # Transform features to match training scale
        scaled_features = self.feature_scaler.transform([features])
        input_tensor = torch.FloatTensor(scaled_features)

        # Load the model before making predictions
        self.load_model('weather_predictor.pth')

        # Predict outputs
        with torch.no_grad():
            scaled_predictions = self.model(input_tensor).numpy()  # Outputs: [temp, precip, snow, windspeed]
            predictions = self.target_scaler.inverse_transform(scaled_predictions.reshape(1, -1)).flatten()

        # Map predictions to target columns
        target_columns = ['temp', 'precip', 'snow', 'windspeed']
        return dict(zip(target_columns, predictions))

    def save_model(self, file_path):
        torch.save(self.model.state_dict(), file_path)

    def load_model(self, file_path):
        self.model.load_state_dict(torch.load(file_path))
        self.model.eval()

def main():
    predictor = WeatherPredictor('basel-weather.csv')
    predictor.train()

    # Predict for a specific date
    result = predictor.predict('01/02/23')
    print("Predictions:", result)


if __name__ == '__main__':
    main()