Skip to content
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
7 changes: 6 additions & 1 deletion qlib/data/dataset/loader.py
Original file line number Diff line number Diff line change
Expand Up @@ -207,7 +207,12 @@ def load(self, instruments=None, start_time=None, end_time=None) -> pd.DataFrame
df = self._data.loc(axis=0)[:, instruments]
if start_time is None and end_time is None:
return df # NOTE: avoid copy by loc
return df.loc[pd.Timestamp(start_time) : pd.Timestamp(end_time)]
# pd.Timestamp(None) evaluates to NaT; slicing the index with NaT does not fetch the correct data, so leave None values unchanged.
if start_time is not None:
start_time = pd.Timestamp(start_time)
if end_time is not None:
end_time = pd.Timestamp(end_time)
return df.loc[start_time:end_time]

def _maybe_load_raw_data(self):
if self._data is not None:
Expand Down
2 changes: 1 addition & 1 deletion qlib/log.py
Original file line number Diff line number Diff line change
Expand Up @@ -70,7 +70,7 @@ def get_module_logger(module_name, level: Optional[int] = None) -> logging.Logge

class TimeInspector:

timer_logger = get_module_logger("timer", level=logging.WARNING)
timer_logger = get_module_logger("timer", level=logging.INFO)

time_marks = []

Expand Down
6 changes: 3 additions & 3 deletions qlib/utils/serial.py
Original file line number Diff line number Diff line change
Expand Up @@ -92,16 +92,16 @@ def to_pickle(self, path: Union[Path, str], dump_all: bool = None, exclude: list
@classmethod
def load(cls, filepath):
"""
Load the collector from a filepath.
Load the serializable class from a filepath.

Args:
filepath (str): the path of file

Raises:
TypeError: the pickled file must be `Collector`
TypeError: the pickled file must be `type(cls)`

Returns:
Collector: the instance of Collector
`type(cls)`: the instance of `type(cls)`
"""
with open(filepath, "rb") as f:
object = cls.get_backend().load(f)
Expand Down
5 changes: 3 additions & 2 deletions qlib/workflow/online/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@

from qlib.log import get_module_logger
from qlib.utils import get_cls_kwargs
from qlib.utils.exceptions import QlibException
from qlib.workflow.online.update import PredUpdater
from qlib.workflow.recorder import Recorder
from qlib.workflow.task.utils import list_recorders
Expand Down Expand Up @@ -191,9 +192,9 @@ def update_online_pred(self, to_date=None, exp_name: str = None):
hist_ref = kwargs.get("step_len", TSDatasetH.DEFAULT_STEP_LEN)
try:
updater = PredUpdater(rec, to_date=to_date, hist_ref=hist_ref)
except OSError:
except QlibException as e:
# skip the recorder without pred
self.logger.warn(f"Can't find `pred.pkl`, skip it.")
self.logger.warn(f"An exception `{str(e)}` happened when load `pred.pkl`, skip it.")
continue
updater.update()

Expand Down
12 changes: 9 additions & 3 deletions qlib/workflow/recorder.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,9 @@
import shutil, os, pickle, tempfile, codecs, pickle
from pathlib import Path
from datetime import datetime

from mlflow.exceptions import MlflowException
from qlib.utils.exceptions import QlibException
from ..utils.objm import FileManager
from ..log import get_module_logger

Expand Down Expand Up @@ -308,9 +311,12 @@ def save_objects(self, local_path=None, artifact_path=None, **kwargs):

def load_object(self, name):
assert self.uri is not None, "Please start the experiment and recorder first before using recorder directly."
path = self.client.download_artifacts(self.id, name)
with Path(path).open("rb") as f:
return pickle.load(f)
try:
path = self.client.download_artifacts(self.id, name)
with Path(path).open("rb") as f:
return pickle.load(f)
except OSError as e:
raise QlibException(message=str(e))
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This exception should be defined in the interface of Recorder


def log_params(self, **kwargs):
for name, data in kwargs.items():
Expand Down