# Hyperparameter tuning script: trains the transformer + RL trading models
# over a parameter grid and logs evaluation results for each configuration.
# %% Import required packages
import itertools
import json
import os

import torch
from sklearn.model_selection import ParameterGrid

from src.models.transformer_model import TransformerModel
from src.models.rl_model import RLModel
# NOTE(review): inconsistent package prefix — sibling modules are imported from
# `src.models`; confirm whether this should be `src.models.trading_model`.
from models.trading_model import TradingAgent
from src.training.train import train_transformer, train_rl
from src.evaluation.evaluate import evaluate_trading_agent
from src.data.data_preprocessing import load_processed_data
|
|
|
|
# %% Set device
# Train on the GPU when one is visible to torch, otherwise fall back to CPU.
device = torch.device("cuda") if torch.cuda.is_available() else torch.device("cpu")
|
|
|
|
# %% Load processed data
# Load the preprocessed market data used for both training and evaluation below.
# NOTE(review): the path is relative to the CWD — assumes the script is run
# from the repository root; confirm against the project's run instructions.
data = load_processed_data('./data/processed/processed_data.csv')
|
|
|
|
# %% Define hyperparameters grid
# Expand the per-hyperparameter option lists into a list of concrete
# configurations (one dict per combination).  Built with itertools.product
# over key-sorted items, which reproduces the iteration order of
# sklearn.model_selection.ParameterGrid without the third-party dependency,
# and avoids rebinding `param_grid` from a dict to a list mid-script.
_grid_options = {
    'learning_rate': [0.001, 0.01],
    'batch_size': [32, 64],
    'epochs': [10, 50],
}
_grid_keys = sorted(_grid_options)
param_grid = [
    dict(zip(_grid_keys, values))
    for values in itertools.product(*(_grid_options[key] for key in _grid_keys))
]
|
|
|
|
# %% Hyperparameters tuning
# Train and evaluate one freshly initialized set of models per configuration.
# BUG FIX: the original script built the models once, before the loop, so every
# configuration after the first continued training an already-trained model —
# earlier runs leaked into later ones and the grid results were not comparable.
# Re-initializing inside the loop makes each configuration independent.
results = []

for params in param_grid:
    # Fresh, untrained models for this configuration.
    transformer_model = TransformerModel().to(device)
    rl_model = RLModel().to(device)
    trading_agent = TradingAgent(transformer_model, rl_model)

    # Train the Transformer model on the processed data.
    train_transformer(transformer_model, data, params)

    # Train the RL policy that drives the trading agent.
    train_rl(trading_agent, data, params)

    # Evaluate the combined trading agent on the same data.
    # NOTE(review): evaluating on the training data — confirm whether a
    # held-out split is intended here.
    evaluation_results = evaluate_trading_agent(trading_agent, data)

    # Record this configuration's outcome.
    results.append({
        'params': params,
        'evaluation_results': evaluation_results,
    })

    print(f"Params: {params}, Evaluation Results: {evaluation_results}")
|
|
|
|
# %% Save tuning results
# Create the logs directory first so the script doesn't crash on a fresh
# checkout, and pretty-print the JSON so the results are easy to inspect.
os.makedirs('./logs', exist_ok=True)
with open('./logs/hyperparameter_tuning_results.json', 'w') as f:
    json.dump(results, f, indent=2)
|
|
|