@@ -379,6 +379,12 @@ def bench_cli(args):
379
379
380
380
def bench_parser(subparsers):
    """Register the ``bench`` subcommand (alias: ``benchmark``) on *subparsers*.

    The subcommand benchmarks the specified AI model; networking is disabled
    by default but can be overridden via ``--network-mode``.
    """
    bench = subparsers.add_parser("bench", aliases=["benchmark"], help="benchmark specified AI Model")
    # Containers get no network unless the caller explicitly opts in.
    bench.add_argument(
        "--network-mode",
        type=str,
        default="none",
        help="set the network mode for the container",
    )
    # Positional argument naming the model to benchmark.
    bench.add_argument("MODEL")
    # Dispatch to the bench command implementation defined elsewhere in this file.
    bench.set_defaults(func=bench_cli)
@@ -600,6 +606,13 @@ def convert_parser(subparsers):
600
606
Model "car" includes base image with the model stored in a /models subdir.
601
607
Model "raw" contains the model and a link file model.file to it stored at /.""" ,
602
608
)
609
+ # https://docs.podman.io/en/latest/markdown/podman-build.1.html#network-mode-net
610
+ parser .add_argument (
611
+ "--network-mode" ,
612
+ type = str ,
613
+ default = "none" ,
614
+ help = "sets the configuration for network namespaces when handling RUN instructions" ,
615
+ )
603
616
parser .add_argument ("SOURCE" ) # positional argument
604
617
parser .add_argument ("TARGET" ) # positional argument
605
618
parser .set_defaults (func = convert_cli )
@@ -717,6 +730,15 @@ def _run(parser):
717
730
def run_parser (subparsers ):
718
731
parser = subparsers .add_parser ("run" , help = "run specified AI Model as a chatbot" )
719
732
_run (parser )
733
+ # Disable network access by default, and give the option to pass any supported network mode into
734
+ # podman if needed:
735
+ # https://docs.podman.io/en/latest/markdown/podman-run.1.html#network-mode-net
736
+ parser .add_argument (
737
+ "--network-mode" ,
738
+ type = str ,
739
+ default = "none" ,
740
+ help = "set the network mode for the container" ,
741
+ )
720
742
parser .add_argument ("MODEL" ) # positional argument
721
743
parser .add_argument (
722
744
"ARGS" , nargs = "*" , help = "Overrides the default prompt, and the output is returned without entering the chatbot"
@@ -742,6 +764,16 @@ def serve_parser(subparsers):
742
764
parser .add_argument (
743
765
"-p" , "--port" , default = config .get ('port' , "8080" ), help = "port for AI Model server to listen on"
744
766
)
767
+ # --network-mode=bridge lets the container listen on localhost, and is an option that's compatible
768
+ # with podman and docker:
769
+ # https://docs.podman.io/en/latest/markdown/podman-run.1.html#network-mode-net
770
+ # https://docs.docker.com/engine/network/#drivers
771
+ parser .add_argument (
772
+ "--network-mode" ,
773
+ type = str ,
774
+ default = "bridge" ,
775
+ help = "set the network mode for the container" ,
776
+ )
745
777
parser .add_argument ("MODEL" ) # positional argument
746
778
parser .set_defaults (func = serve_cli )
747
779
0 commit comments