Callback for logging run metadata

Source file: examples/4_advanced_optimizer/3_metadata_callback.py

Description

An example of using a callback to log run metadata to a file. Any arguments passed to the callback are written to a JSON file at the start of the SMAC run (the arguments must be JSON serializable).
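
Once the run has started, the logged arguments can be read back by loading that JSON file. The following is a minimal sketch, assuming the callback writes a file named metadata.json into the scenario output directory (default root: smac3_output) and that the keys mirror the keyword arguments passed to the callback; check your SMAC version for the exact file name and location.

import json
from pathlib import Path

# Minimal sketch, assuming MetadataCallback writes "metadata.json" into the
# scenario output directory (default root: "smac3_output") and that the keys
# mirror the keyword arguments given to the callback.
for metadata_file in Path("smac3_output").rglob("metadata.json"):
    with open(metadata_file) as f:
        metadata = json.load(f)
    print(metadata_file, "->", metadata.get("project_name"))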

Instead of manually editing the Git-related information (repository, branch, commit) every time it changes, this information can also be added automatically with GitPython (install via pip install GitPython). An example of retrieving it with GitPython is given below:

import sys

from git import Repo

# Locate the enclosing Git repository, starting the search from the current directory
repo = Repo(".", search_parent_directories=True)
MetadataCallback(
    repository=repo.working_tree_dir.split("/")[-1],  # name of the repository directory
    branch=str(repo.active_branch),  # currently checked-out branch
    commit=str(repo.head.commit),  # hash of the current commit
    # command line, with the script path given relative to the repository root
    command=" ".join([sys.argv[0][len(repo.working_tree_dir) + 1:]] + sys.argv[1:]),
)

import sys

from ConfigSpace import Configuration, ConfigurationSpace, Float

from smac import HyperparameterOptimizationFacade as HPOFacade
from smac import Scenario
from smac.callback.metadata_callback import MetadataCallback

__copyright__ = "Copyright 2025, Leibniz University Hanover, Institute of AI"
__license__ = "3-clause BSD"


class Rosenbrock2D:
    @property
    def configspace(self) -> ConfigurationSpace:
        cs = ConfigurationSpace(seed=0)
        x0 = Float("x0", (-5, 10), default=-3)
        x1 = Float("x1", (-5, 10), default=-4)
        cs.add([x0, x1])

        return cs

    def train(self, config: Configuration, seed: int = 0) -> float:
        x1 = config["x0"]
        x2 = config["x1"]

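        # 2D Rosenbrock function; its global minimum of 0 is at x0 = x1 = 1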
        cost = 100.0 * (x2 - x1**2.0) ** 2.0 + (1 - x1) ** 2.0
        return cost


if __name__ == "__main__":
    model = Rosenbrock2D()

    # Scenario object specifying the optimization "environment"
    scenario = Scenario(model.configspace, n_trials=200)

    # Now we use SMAC to find the best hyperparameters and add the metadata callback defined above
    HPOFacade(
        scenario,
        model.train,
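        # overwrite=True starts a fresh run even if results from a previous run exist in the output directory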
        overwrite=True,
        callbacks=[
            MetadataCallback(
                project_name="My Project Name",
                repository="My Repository Name",
                branch="Name of Active Branch",
                commit="Commit Hash",
                command=" ".join(sys.argv),
                additional_information="Some Additional Information",
            )
        ],
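        # an extremely high logging level effectively silences SMAC's log output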
        logging_level=999999,
    ).optimize()