Commit d1cc571

Merge pull request #1867 from rhatdan/except

Catch KeyError exceptions

2 parents: d677908 + e9cb891

9 files changed (+17, -23 lines)

ramalama/cli.py

Lines changed: 2 additions & 8 deletions
@@ -1277,20 +1277,14 @@ def eprint(e, exit_code):
         eprint(f"pulling {e.geturl()} failed: {e}", errno.EINVAL)
     except HelpException:
         parser.print_help()
-    except IndexError as e:
+    except (ConnectionError, IndexError, KeyError, ValueError) as e:
         eprint(e, errno.EINVAL)
-    except KeyError as e:
-        eprint(e, 1)
     except NotImplementedError as e:
-        eprint(e, errno.ENOTSUP)
+        eprint(e, errno.ENOSYS)
     except subprocess.CalledProcessError as e:
         eprint(e, e.returncode)
     except KeyboardInterrupt:
         sys.exit(0)
-    except ConnectionError as e:
-        eprint(e, errno.EINVAL)
-    except ValueError as e:
-        eprint(e, errno.EINVAL)
     except IOError as e:
         eprint(e, errno.EIO)
     except ParseError as e:
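
The net effect of this hunk: ConnectionError, IndexError, KeyError, and ValueError now share a single handler that exits with errno.EINVAL (22 on Linux), and NotImplementedError now exits with errno.ENOSYS (38 on Linux) instead of errno.ENOTSUP. This is why the test expectations later in this commit move from exit code 1 to 22. Below is a minimal, self-contained sketch of the pattern with a simplified eprint stand-in, not ramalama's actual helper or exception types:

import errno
import subprocess
import sys


def eprint(e, exit_code):
    # Simplified stand-in: print the error on stderr and exit with the given code.
    print(f"Error: {e}", file=sys.stderr)
    sys.exit(exit_code)


def main():
    try:
        raise KeyError("missing key")  # stand-in for any failing CLI code path
    except (ConnectionError, IndexError, KeyError, ValueError) as e:
        eprint(e, errno.EINVAL)        # 22 on Linux: treated as bad user input
    except NotImplementedError as e:
        eprint(e, errno.ENOSYS)        # 38 on Linux: function not implemented
    except subprocess.CalledProcessError as e:
        eprint(e, e.returncode)        # propagate the child process's exit status
    except KeyboardInterrupt:
        sys.exit(0)
    except IOError as e:
        eprint(e, errno.EIO)           # 5 on Linux: I/O error


if __name__ == "__main__":
    main()

Run as a script, this example prints "Error: 'missing key'" and exits with status 22.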

ramalama/common.py

Lines changed: 3 additions & 3 deletions
@@ -574,13 +574,13 @@ def select_cuda_image(config: Config) -> str:
         str: The appropriate CUDA image name

     Raises:
-        RuntimeError: If CUDA version is less than 12.4
+        NotImplementedError: If CUDA version is less than 12.4
     """
     # Get the default CUDA image from config
     cuda_image = config.images.get("CUDA_VISIBLE_DEVICES")

     if cuda_image is None:
-        raise RuntimeError("No image repository found for CUDA_VISIBLE_DEVICES in config.")
+        raise NotImplementedError("No image repository found for CUDA_VISIBLE_DEVICES in config.")

     # Check CUDA version and select appropriate image
     cuda_version = check_cuda_version()

@@ -591,7 +591,7 @@ def select_cuda_image(config: Config) -> str:
     elif cuda_version >= (12, 4):
         return f"{cuda_image}-12.4.1"  # Use the specific version for older CUDA
     else:
-        raise RuntimeError(f"CUDA version {cuda_version} is not supported. Minimum required version is 12.4.")
+        raise NotImplementedError(f"CUDA version {cuda_version} is not supported. Minimum required version is 12.4.")


 class AccelImageArgsWithImage(Protocol):
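
Switching these raises from RuntimeError to NotImplementedError matters because of the cli.py hunk above: NotImplementedError is converted there to an ENOSYS exit code, whereas RuntimeError is not caught by any handler visible in that hunk. A rough sketch of how a caller now sees an unsupported CUDA version; the helper name, image repository, and the single version branch reproduced here are simplifications of select_cuda_image(), not its real signature:

import errno


def pick_cuda_image(cuda_image, cuda_version):
    # Hypothetical stand-in for select_cuda_image(); only the branches visible
    # in the hunk above are reproduced.
    if cuda_image is None:
        raise NotImplementedError("No image repository found for CUDA_VISIBLE_DEVICES in config.")
    if cuda_version >= (12, 4):
        return f"{cuda_image}-12.4.1"
    raise NotImplementedError(
        f"CUDA version {cuda_version} is not supported. Minimum required version is 12.4."
    )


try:
    pick_cuda_image("quay.io/example/cuda", (12, 0))  # hypothetical image repository
except NotImplementedError as e:
    print(f"Error: {e}")
    raise SystemExit(errno.ENOSYS)  # mirrors the cli.py mapping: 38 on Linux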

test/system/010-list.bats

Lines changed: 1 addition & 1 deletion
@@ -51,7 +51,7 @@ size | [0-9]\\\+

 @test "ramalama rm --ignore" {
     random_image_name=i_$(safename)
-    run_ramalama 1 rm ${random_image_name}
+    run_ramalama 22 rm ${random_image_name}
     is "$output" "Error: Model '${random_image_name}' not found.*"
     run_ramalama rm --ignore ${random_image_name}
     is "$output" ""

test/system/015-help.bats

Lines changed: 1 addition & 1 deletion
@@ -213,7 +213,7 @@ EOF

 @test "ramalama verify transport" {
     transport=e_$(safename)
-    RAMALAMA_TRANSPORT=${transport} run_ramalama 1 pull foobar
+    RAMALAMA_TRANSPORT=${transport} run_ramalama 22 pull foobar
     is "$output" "Error: transport \"${transport}\" not supported. Must be oci, huggingface, modelscope, or ollama." "Verify bogus transport throws error"

 }

test/system/030-run.bats

Lines changed: 3 additions & 3 deletions
@@ -93,11 +93,11 @@ EOF
         is "$output" '.*serve.*--ctx-size 4096 --temp 0.8.*' "dryrun correct"
         is "$output" ".*--ctx-size 4096" "verify model name"

-        run_ramalama 1 run --ctx-size=4096 --name foobar ${MODEL}
+        run_ramalama 22 run --ctx-size=4096 --name foobar ${MODEL}
         is "${lines[0]}" "Error: --nocontainer and --name options conflict. The --name option requires a container." "conflict between nocontainer and --name line"
-        run_ramalama 1 run --name foobar ${MODEL}
+        run_ramalama 22 run --name foobar ${MODEL}
         is "${lines[0]}" "Error: --nocontainer and --name options conflict. The --name option requires a container." "conflict between nocontainer and --name line"
-        run_ramalama 1 run --privileged ${MODEL}
+        run_ramalama 22 run --privileged ${MODEL}
         is "${lines[0]}" "Error: --nocontainer and --privileged options conflict. The --privileged option requires a container." "conflict between nocontainer and --privileged line"
     fi
 }

test/system/040-serve.bats

Lines changed: 2 additions & 2 deletions
@@ -80,7 +80,7 @@ verify_begin=".*run --rm"
         run_ramalama -q --dryrun serve --seed abcd --host 127.0.0.1 ${model}
         assert "$output" =~ ".*--host 127.0.0.1" "Outside container overrides host to 127.0.0.1"
         assert "$output" =~ ".*--seed abcd" "Verify seed is set"
-        run_ramalama 1 --nocontainer serve --name foobar tiny
+        run_ramalama 22 --nocontainer serve --name foobar tiny
         is "${lines[0]}" "Error: --nocontainer and --name options conflict. The --name option requires a container." "conflict between nocontainer and --name line"
     fi

@@ -100,7 +100,7 @@ verify_begin=".*run --rm"
     run_ramalama 22 -q --dryrun serve --runtime-args="--foo='a b c" ${model}
     assert "$output" =~ "No closing quotation" "error for improperly quoted runtime arguments"

-    run_ramalama 1 serve MODEL
+    run_ramalama 22 serve MODEL
     assert "$output" =~ "Error: Manifest for MODEL:latest was not found in the Ollama registry"
 }

test/system/050-pull.bats

Lines changed: 2 additions & 2 deletions
@@ -25,7 +25,7 @@ load setup_suite
     run_ramalama rm https://ollama.com/library/smollm:135m ollama://smollm:360m

     random_image_name=i_$(safename)
-    run_ramalama 1 -q pull ${random_image_name}
+    run_ramalama 22 -q pull ${random_image_name}
     is "$output" "Error: Manifest for ${random_image_name}:latest was not found in the Ollama registry"
 }

@@ -54,7 +54,7 @@ load setup_suite
     ollama rm smollm:135m smollm:360m

     random_image_name=i_$(safename)
-    run_ramalama 1 -q pull ${random_image_name}
+    run_ramalama 22 -q pull ${random_image_name}
     is "$output" "Error: Manifest for ${random_image_name}:latest was not found in the Ollama registry"

     pkill ollama

test/system/055-convert.bats

Lines changed: 1 addition & 1 deletion
@@ -9,7 +9,7 @@ load helpers
         is "$output" ".*ramalama convert: error: the following arguments are required: SOURCE, TARGET"
         run_ramalama 2 convert tiny
         is "$output" ".*ramalama convert: error: the following arguments are required: TARGET"
-        run_ramalama 1 convert bogus foobar
+        run_ramalama 22 convert bogus foobar
         is "$output" "Error: Manifest for bogus:latest was not found in the Ollama registry"
     else
         run_ramalama 22 convert tiny quay.io/ramalama/foobar

test/system/080-mlx.bats

Lines changed: 2 additions & 2 deletions
@@ -163,7 +163,7 @@ function skip_if_no_mlx() {
     skip_if_no_mlx

     # --name requires container mode, which MLX doesn't support
-    run_ramalama 1 --runtime=mlx run --name test ${MODEL}
+    run_ramalama 22 --runtime=mlx run --name test ${MODEL}
     is "$output" ".*--nocontainer.*--name.*conflict.*" "should show conflict error"
 }

@@ -172,6 +172,6 @@ function skip_if_no_mlx() {
     skip_if_no_mlx

     # --privileged requires container mode, which MLX doesn't support
-    run_ramalama 1 --runtime=mlx run --privileged ${MODEL}
+    run_ramalama 22 --runtime=mlx run --privileged ${MODEL}
     is "$output" ".*--nocontainer.*--privileged.*conflict.*" "should show conflict error"
 }
