
Commit 9fb76b6

Move model and rag to use shared Engine implementation
Shrink the size of cli.py and model.py by moving all engine-related functions into the new engine.py Python module.

Signed-off-by: Daniel J Walsh <[email protected]>
1 parent f7d39c4 commit 9fb76b6

File tree

8 files changed: +434, -336 lines


.codespellrc

Lines changed: 1 addition & 1 deletion
@@ -5,7 +5,7 @@
 skip = ./logos,./vendor,./.git #,bin,vendor,.git,go.sum,changelog.txt,.cirrus.yml,"RELEASE_NOTES.md,*.xz,*.gz,*.tar,*.tgz,bin2img,*ico,*.png,*.1,*.5,*.7,copyimg,*.orig,apidoc.go"

 # Comma separated list of words to be ignored. Words must be lowercased.
-ignore-words-list = clos,creat,ro,hastable,shouldnot,mountns,passt
+ignore-words-list = clos,creat,ro,hastable,shouldnot,mountns,passt,assertin

 # Custom dictionary file that contains spelling corrections.
 # Run with option '--dictionary=-' to include also default dictionary.

ramalama/cli.py

Lines changed: 12 additions & 101 deletions
@@ -3,7 +3,6 @@
 import json
 import os
 import shlex
-import subprocess
 from datetime import datetime, timezone
 from pathlib import Path

@@ -15,9 +14,10 @@
 except Exception:
     suppressCompleter = None

+import ramalama.engine as engine
 import ramalama.oci
 import ramalama.rag
-from ramalama.common import accel_image, exec_cmd, get_accel, get_cmd_with_wrapper, perror, run_cmd
+from ramalama.common import accel_image, exec_cmd, get_accel, get_cmd_with_wrapper, perror
 from ramalama.config import CONFIG
 from ramalama.model import MODEL_TYPES
 from ramalama.model_factory import ModelFactory
@@ -43,17 +43,17 @@ def local_env(**kwargs):


 def local_models(prefix, parsed_args, **kwargs):
-    return [model['name'] for model in _list_models(parsed_args)]
+    return [model['name'] for model in engine.list_models(parsed_args)]


 def local_containers(prefix, parsed_args, **kwargs):
     parsed_args.format = '{{.Names}}'
-    return _list_containers(parsed_args)
+    return engine.containers(parsed_args)


 def local_images(prefix, parsed_args, **kwargs):
     parsed_args.format = "{{.Repository}}:{{.Tag}}"
-    return _list_images(parsed_args)
+    return engine.images(parsed_args)


 class ArgumentParserWithDefaults(argparse.ArgumentParser):
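These three helpers are shell-completion callbacks (hence the prefix/parsed_args/**kwargs signature argcomplete expects); after this change they just set the desired output format and delegate to the shared engine module. As an illustration only, this is roughly how such a completer is typically attached with argcomplete; the "stop"/"NAME" parser layout below is invented for the example and is not taken from this commit.

# Illustration only: wiring local_containers up as an argcomplete completer.
# The subcommand and argument names are hypothetical, not ramalama's real CLI layout.
import argparse

import argcomplete

from ramalama.cli import local_containers

parser = argparse.ArgumentParser(prog="ramalama")
subparsers = parser.add_subparsers()
stop_parser = subparsers.add_parser("stop")
# argcomplete calls the completer as completer(prefix=..., parsed_args=..., **kwargs),
# which matches the signature of local_containers above.
stop_parser.add_argument("NAME").completer = local_containers
argcomplete.autocomplete(parser)
args = parser.parse_args()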
@@ -396,60 +396,11 @@ def containers_parser(subparsers):
     parser.set_defaults(func=list_containers)


-def _list_images(args):
-    conman = args.engine
-    if conman == "" or conman is None:
-        raise ValueError("no container manager (Podman, Docker) found")
-
-    conman_args = [conman, "images"]
-    if hasattr(args, "noheading") and args.noheading:
-        conman_args += ["--noheading"]
-
-    if hasattr(args, "notrunc") and args.notrunc:
-        conman_args += ["--no-trunc"]
-
-    if args.format:
-        conman_args += [f"--format={args.format}"]
-
-    try:
-        output = run_cmd(conman_args, debug=args.debug).stdout.decode("utf-8").strip()
-        if output == "":
-            return []
-        return output.split("\n")
-    except subprocess.CalledProcessError as e:
-        perror("ramalama list command requires a running container engine")
-        raise (e)
-
-
-def _list_containers(args):
-    conman = args.engine
-    if conman == "" or conman is None:
-        raise ValueError("no container manager (Podman, Docker) found")
-
-    conman_args = [conman, "ps", "-a", "--filter", "label=ai.ramalama"]
-    if hasattr(args, "noheading") and args.noheading:
-        conman_args += ["--noheading"]
-
-    if hasattr(args, "notrunc") and args.notrunc:
-        conman_args += ["--no-trunc"]
-
-    if args.format:
-        conman_args += [f"--format={args.format}"]
-
-    try:
-        output = run_cmd(conman_args, debug=args.debug).stdout.decode("utf-8").strip()
-        if output == "":
-            return []
-        return output.split("\n")
-    except subprocess.CalledProcessError as e:
-        perror("ramalama list command requires a running container engine")
-        raise (e)
-
-
 def list_containers(args):
-    if len(_list_containers(args)) == 0:
+    containers = engine.containers(args)
+    if len(containers) == 0:
         return
-    print("\n".join(_list_containers(args)))
+    print("\n".join(containers))


 def info_parser(subparsers):
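The two deleted listing helpers were nearly identical; only the engine subcommand differs (ps -a --filter label=ai.ramalama for containers, images for images), which is presumably why they move into the shared module. The new ramalama/engine.py is part of this commit but not shown in this excerpt, so the following is only a sketch of what engine.containers() and engine.images() could look like if they keep the removed behavior; the _run_listing helper name is invented for illustration.

# Sketch only: listing helpers as they might appear in ramalama/engine.py,
# assuming the removed cli.py logic moved over with the duplication factored out.
import subprocess

from ramalama.common import perror, run_cmd


def _run_listing(args, conman_args):
    # Shared tail of the two removed helpers: honor the optional flags,
    # run the engine command, and split its output into lines.
    if getattr(args, "noheading", False):
        conman_args += ["--noheading"]
    if getattr(args, "notrunc", False):
        conman_args += ["--no-trunc"]
    if args.format:
        conman_args += [f"--format={args.format}"]
    try:
        output = run_cmd(conman_args, debug=args.debug).stdout.decode("utf-8").strip()
        return output.split("\n") if output else []
    except subprocess.CalledProcessError as e:
        perror("ramalama list command requires a running container engine")
        raise e


def containers(args):
    if not args.engine:
        raise ValueError("no container manager (Podman, Docker) found")
    return _run_listing(args, [args.engine, "ps", "-a", "--filter", "label=ai.ramalama"])


def images(args):
    if not args.engine:
        raise ValueError("no container manager (Podman, Docker) found")
    return _run_listing(args, [args.engine, "images"])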
@@ -537,21 +488,6 @@ def _list_models(args):
     return models


-def engine_info(args):
-    conman = args.engine
-    if conman == "":
-        raise ValueError("no container manager (Podman, Docker) found")
-
-    conman_args = [conman, "info", "--format", "json"]
-    try:
-        output = run_cmd(conman_args, debug=args.debug).stdout.decode("utf-8").strip()
-        if output == "":
-            return []
-        return json.loads(output)
-    except FileNotFoundError as e:
-        return str(e)
-
-
 def info_cli(args):
     info = {
         "Engine": {
@@ -564,7 +500,7 @@ def info_cli(args):
         "Version": version(),
     }
     if args.engine and len(args.engine) > 0:
-        info["Engine"]["Info"] = engine_info(args)
+        info["Engine"]["Info"] = engine.info(args)

     info["Accelerator"] = get_accel()
     print(json.dumps(info, sort_keys=True, indent=4))
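engine.info() replaces the deleted engine_info(). Since engine.py itself is not visible in this excerpt, the following is only a sketch assuming the logic moved over unchanged: query the container manager with info --format json, parse the result, and fall back to the error string when the engine binary is missing.

# Sketch only: engine.info() as it might look if it keeps the removed
# engine_info() behavior; the real ramalama/engine.py is not shown here.
import json

from ramalama.common import run_cmd


def info(args):
    conman = args.engine
    if conman == "":
        raise ValueError("no container manager (Podman, Docker) found")

    conman_args = [conman, "info", "--format", "json"]
    try:
        output = run_cmd(conman_args, debug=args.debug).stdout.decode("utf-8").strip()
        if output == "":
            return []
        return json.loads(output)
    except FileNotFoundError as e:
        # Keep the original behavior: report a missing engine binary as a string.
        return str(e)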
@@ -997,41 +933,16 @@ def stop_parser(subparsers):
     parser.set_defaults(func=stop_container)


-def _stop_container(args, name):
-    if not name:
-        raise ValueError("must specify a container name")
-    conman = args.engine
-    if conman == "":
-        raise ValueError("no container manager (Podman, Docker) found")
-
-    conman_args = [conman, "stop", "-t=0"]
-    ignore_stderr = False
-    if args.ignore:
-        if conman == "podman":
-            conman_args += ["--ignore", str(args.ignore)]
-        else:
-            ignore_stderr = True
-
-    conman_args += [name]
-    try:
-        run_cmd(conman_args, ignore_stderr=ignore_stderr, debug=args.debug)
-    except subprocess.CalledProcessError:
-        if args.ignore and conman == "docker":
-            return
-        else:
-            raise
-
-
 def stop_container(args):
     if not args.all:
-        return _stop_container(args, args.NAME)
+        return engine.stop_container(args, args.NAME)

     if args.NAME:
         raise ValueError("specifying --all and container name, %s, not allowed" % args.NAME)
     args.ignore = True
     args.format = "{{ .Names }}"
-    for i in _list_containers(args):
-        _stop_container(args, i)
+    for i in engine.containers(args):
+        engine.stop_container(args, i)


 def version_parser(subparsers):
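stop_container() now delegates to engine.stop_container(args, name). Below is a sketch of that helper, assuming the removed _stop_container() body moved into engine.py as-is; note the podman/docker split, since only podman's stop command has an --ignore flag.

# Sketch only: engine.stop_container() assuming it carries over the removed
# _stop_container() logic unchanged; the real ramalama/engine.py is not shown here.
import subprocess

from ramalama.common import run_cmd


def stop_container(args, name):
    if not name:
        raise ValueError("must specify a container name")
    conman = args.engine
    if conman == "":
        raise ValueError("no container manager (Podman, Docker) found")

    conman_args = [conman, "stop", "-t=0"]
    ignore_stderr = False
    if args.ignore:
        if conman == "podman":
            # Podman can ignore a missing container natively.
            conman_args += ["--ignore", str(args.ignore)]
        else:
            # Docker has no --ignore flag: silence stderr and swallow the error below.
            ignore_stderr = True

    conman_args += [name]
    try:
        run_cmd(conman_args, ignore_stderr=ignore_stderr, debug=args.debug)
    except subprocess.CalledProcessError:
        if args.ignore and conman == "docker":
            return
        raise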
