forked from microsoft/qlib
-
Notifications
You must be signed in to change notification settings - Fork 3
Expand file tree
/
Copy path data_mem_resuse_demo.py
More file actions
59 lines (47 loc) · 2.12 KB
/
data_mem_resuse_demo.py
File metadata and controls
59 lines (47 loc) · 2.12 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
"""
The motivation of this demo
- To show that the data modules of Qlib are serializable: users can dump processed data to disk to avoid duplicated data preprocessing
"""
from copy import deepcopy
from pathlib import Path
import pickle
from pprint import pprint
import subprocess
import yaml
from qlib import init
from qlib.data.dataset.handler import DataHandlerLP
from qlib.log import TimeInspector
from qlib.model.trainer import task_train
from qlib.utils import init_instance_by_config
# For general purpose, we use relative path
# NOTE: Path.resolve() already returns an absolute path (with symlinks
# resolved), so the extra .absolute() call was redundant and is dropped.
DIRNAME = Path(__file__).resolve().parent
if __name__ == "__main__":
    # Demo: time `task_train` with and without reusing an in-memory,
    # already-processed DataHandlerLP, to show the saving from skipping
    # repeated data preprocessing.
    init()

    repeat = 2
    exp_name = "data_mem_reuse_demo"

    config_path = DIRNAME.parent / "benchmarks/LightGBM/workflow_config_lightgbm_Alpha158.yaml"
    task_config = yaml.safe_load(config_path.open())

    # 1) Baseline: each run reloads and reprocesses the data from disk.
    with TimeInspector.logt("The original time without reusing processed data in memory:"):
        for _ in range(repeat):
            task_train(task_config["task"], experiment_name=exp_name)

    # 2) Instantiate the handler once so the processed data stays in memory.
    hd_conf = task_config["task"]["dataset"]["kwargs"]["handler"]
    pprint(hd_conf)
    hd: DataHandlerLP = init_instance_by_config(hd_conf)

    # 3) Reuse the in-memory handler: deep-copy the task config, then swap
    #    the handler *config dict* for the live handler *instance*.
    #    (Assign after deepcopy so the handler itself is not copied.)
    new_task = deepcopy(task_config["task"])
    new_task["dataset"]["kwargs"]["handler"] = hd
    print(new_task)
    with TimeInspector.logt("The time with reusing processed data in memory:"):
        # this will save the time to reload and process data from disk (in `DataHandlerLP`)
        # It still takes a lot of time in the backtest phase
        for _ in range(repeat):
            task_train(new_task, experiment_name=exp_name)

    # 4) Users can change other parts of the task (everything except the
    #    in-memory handler), e.g. the training segment here.
    new_task = deepcopy(task_config["task"])
    # BUGFIX: the original rebuilt the task from the raw config, whose handler
    # is still the config dict — so the data was reloaded from disk despite the
    # log message. Install the live handler so the in-memory data is reused.
    new_task["dataset"]["kwargs"]["handler"] = hd
    new_task["dataset"]["kwargs"]["segments"]["train"] = ("20100101", "20131231")
    with TimeInspector.logt("The time with reusing processed data in memory (changed train segment):"):
        task_train(new_task, experiment_name=exp_name)