Skip to content

predict_and_submit

Logic for predicting and creating a submission file for a single model.

kelp.nn.inference.predict_and_submit.PredictAndSubmitConfig

Bases: PredictConfig

Config for running prediction and submission file generation in a single pass.

Source code in kelp/nn/inference/predict_and_submit.py
15
16
17
18
19
class PredictAndSubmitConfig(PredictConfig):
    """Config for running prediction and submission file generation in a single pass.

    Extends ``PredictConfig`` with options that control the optional preview
    plots rendered after the submission archive has been created.
    """

    # When True, render preview plots of the first few predicted samples
    # after the submission file is written.
    preview_submission: bool = False
    # How many samples to include in the preview (only used when
    # preview_submission is True).
    preview_first_n: int = 10

kelp.nn.inference.predict_and_submit.copy_run_artifacts

Copies run artifacts from run_dir to output_dir.

Parameters:

Name Type Description Default
run_dir Path

The directory to copy run artifacts from.

required
output_dir Path

The output directory.

required
Source code in kelp/nn/inference/predict_and_submit.py
39
40
41
42
43
44
45
46
47
48
def copy_run_artifacts(run_dir: Path, output_dir: Path) -> None:
    """
    Mirrors the contents of a run directory into the output directory.

    Args:
        run_dir: Source directory holding the run artifacts.
        output_dir: Destination root; artifacts land under ``output_dir / run_dir.name``.

    """
    # Copy recursively; tolerate the destination already existing so repeated
    # runs simply overwrite previously copied artifacts.
    destination = output_dir / run_dir.name
    shutil.copytree(run_dir, destination, dirs_exist_ok=True)

kelp.nn.inference.predict_and_submit.main

The main entry point for running prediction and submission file generation.

Source code in kelp/nn/inference/predict_and_submit.py
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
def main() -> None:
    """The main entry point for running prediction and submission file generation.

    Parses CLI args, runs (optionally SAHI-tiled) prediction into a fresh
    timestamped output directory, packages the predictions into a submission
    tar, copies the training-run artifacts alongside it and, if requested,
    renders preview plots of the first few samples.
    """
    from datetime import timezone  # local import: avoids touching module imports

    cfg = parse_args()
    # datetime.utcnow() is deprecated since Python 3.12. Take an aware UTC
    # "now" and drop the tzinfo so the directory name keeps the same naive
    # ISO format as before (no "+00:00" suffix).
    now = datetime.now(timezone.utc).replace(tzinfo=None).isoformat()
    out_dir = cfg.output_dir / now
    # Derive from out_dir instead of recomputing cfg.output_dir / now.
    preds_dir = out_dir / "predictions"
    preds_dir.mkdir(exist_ok=False, parents=True)
    # Persist the resolved config next to the outputs for reproducibility.
    (out_dir / "predict_config.yaml").write_text(yaml.dump(cfg.model_dump(mode="json")))
    if cfg.training_config.sahi:
        run_sahi_prediction(
            data_dir=cfg.data_dir,
            output_dir=preds_dir,
            model_checkpoint=cfg.model_checkpoint,
            use_mlflow=cfg.use_mlflow,
            train_cfg=cfg.training_config,
            tta=cfg.tta,
            soft_labels=cfg.soft_labels,
            tta_merge_mode=cfg.tta_merge_mode,
            decision_threshold=cfg.decision_threshold,
            sahi_tile_size=cfg.sahi_tile_size,
            sahi_overlap=cfg.sahi_overlap,
        )
    else:
        run_prediction(
            data_dir=cfg.data_dir,
            output_dir=preds_dir,
            model_checkpoint=cfg.model_checkpoint,
            use_mlflow=cfg.use_mlflow,
            train_cfg=cfg.training_config,
            tta=cfg.tta,
            tta_merge_mode=cfg.tta_merge_mode,
            decision_threshold=cfg.decision_threshold,
        )
    create_submission_tar(
        preds_dir=preds_dir,
        output_dir=out_dir,
    )
    copy_run_artifacts(
        run_dir=cfg.run_dir,  # type: ignore[arg-type]
        output_dir=out_dir,
    )
    if cfg.preview_submission:
        plot_first_n_samples(
            data_dir=cfg.data_dir,
            submission_dir=out_dir,
            output_dir=out_dir / "previews",
            n=cfg.preview_first_n,
        )

kelp.nn.inference.predict_and_submit.parse_args

Parse command line arguments.

Returns: An instance of PredictAndSubmitConfig.

Source code in kelp/nn/inference/predict_and_submit.py
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
def parse_args() -> PredictAndSubmitConfig:
    """
    Build the CLI parser, parse the arguments and materialize the config.

    Returns: An instance of PredictAndSubmitConfig.

    """
    parser = build_prediction_arg_parser()
    # Register the submission-preview flags on top of the shared prediction args.
    for flag, kwargs in (
        ("--preview_submission", {"action": "store_true"}),
        ("--preview_first_n", {"type": int, "default": 10}),
    ):
        parser.add_argument(flag, **kwargs)
    parsed = vars(parser.parse_args())
    config = PredictAndSubmitConfig(**parsed)
    config.log_self()
    config.output_dir.mkdir(exist_ok=True, parents=True)
    return config