# Copyright 2025 - Oumi
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from dataclasses import dataclass
from pathlib import Path
from typing import Optional

import requests
import yaml
from typing_extensions import Self

from oumi.cli import cli_utils
from oumi.cli.alias import AliasType, try_get_config_name_for_alias
from oumi.core.configs import BaseConfig
from oumi.core.configs.inference_config import InferenceConfig
from oumi.core.configs.params.judge_params import JudgeParams

# Template used to turn a bare judge name (e.g. "doc_qa/relevance") into a
# full oumi:// repo path that cli_utils can resolve and fetch.
JUDGE_CONFIG_REPO_PATH_TEMPLATE = "oumi://configs/projects/judges/{path}.yaml"
@dataclass
class JudgeConfig(BaseConfig):
    """Consolidated configuration for the Judge.

    This class combines the judge parameters (JudgeParams) and inference
    configuration (InferenceConfig) into a single configuration object.

    Example:
        >>> judge_config = JudgeConfig( # doctest: +SKIP
        ...     judge_params=JudgeParams(
        ...         prompt_template="Is this helpful? {question}, {answer}",
        ...         response_format=JudgeResponseFormat.XML,
        ...         judgment_type=JudgeOutputType.BOOL,
        ...         include_explanation=False
        ...     ),
        ...     inference_config=InferenceConfig(
        ...         model=ModelParams(model_name="gpt-4.1"),
        ...         generation=GenerationParams(max_tokens=100),
        ...         engine=InferenceEngineType.OPENAI
        ...     )
        ... )
    """

    judge_params: JudgeParams
    """Parameters for the judge prompt and response format."""

    inference_config: InferenceConfig
    """Configuration for the inference engine and generation parameters."""

    @classmethod
    def from_path(cls, path: str, extra_args: Optional[list[str]] = None) -> Self:
        """Resolve the JudgeConfig from a local or repo path.

        Resolution is attempted in order: an alias registered for judges, a
        local filesystem or ``oumi://`` repo path, and finally a built-in
        judge name expanded via ``JUDGE_CONFIG_REPO_PATH_TEMPLATE``.

        Args:
            path: An alias, a local path, an ``oumi://`` repo path, or a
                built-in judge name such as ``"doc_qa/relevance"``.
            extra_args: Optional CLI-style overrides applied on top of the
                loaded YAML (defaults to no overrides).

        Returns:
            The parsed ``JudgeConfig``.

        Raises:
            ValueError: If the path cannot be resolved to an existing config
                file, or if the resolved YAML fails to parse as a JudgeConfig.
        """

        def _resolve_path(unresolved_path: str) -> Optional[str]:
            try:
                # Attempt to resolve the path using CLI utilities.
                # This will handle both local paths and repo (oumi://) paths.
                resolved_path = str(
                    cli_utils.resolve_and_fetch_config(unresolved_path)
                )
            except (
                requests.exceptions.RequestException,  # Network/HTTP issues
                yaml.YAMLError,  # YAML parsing errors
                OSError,  # File system operations (includes IOError)
            ):
                # If resolution fails, mask the error and return None.
                return None
            # If resolution succeeds, check if the resolved path exists indeed.
            return resolved_path if Path(resolved_path).exists() else None

        if extra_args is None:
            extra_args = []

        # If `path` is an alias, resolve it to the corresponding oumi:// path.
        path = try_get_config_name_for_alias(path, AliasType.JUDGE)

        # If `path` is a local or repo path, load JudgeConfig obj from that path.
        # Repo example: path = "oumi://configs/projects/judges/doc_qa/relevance.yaml"
        # Local example: path = "./local_path/relevance.yaml"
        resolved_path = _resolve_path(path)

        # If `path` is a built-in judge name, construct the path from the default
        # repo location, and then load the corresponding JudgeConfig.
        # Example:
        # "doc_qa/relevance" => "oumi://configs/projects/judges/doc_qa/relevance.yaml"
        if not resolved_path:
            resolved_path = _resolve_path(
                JUDGE_CONFIG_REPO_PATH_TEMPLATE.format(path=path)
            )

        if resolved_path:
            try:
                return cls.from_yaml_and_arg_list(resolved_path, extra_args)
            except Exception as e:
                raise ValueError(
                    f"Failed to parse {resolved_path} as JudgeConfig. "
                    f"Please ensure the YAML file contains both 'judge_params' and "
                    f"'inference_config' sections with valid fields. "
                    f"Original error: {e}"
                ) from e
        else:
            raise ValueError(
                f"Could not resolve JudgeConfig from path: {path}. "
                "Please provide a valid local or GitHub repo path."
            )