13 | 13 | from typing import Any, get_args |
14 | 14 | from urllib.parse import urlparse |
15 | 15 |
16 | | -from ramalama.benchmarks.errors import MissingStorageFolderError |
17 | 16 | from ramalama.benchmarks.manager import BenchmarksManager |
18 | 17 |
19 | 18 | # if autocomplete doesn't exist, just do nothing, don't break |
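The context line above refers to a guarded optional import: tab completion is wired up only when the completion package is installed. A minimal sketch of that pattern, assuming argcomplete is the package being guarded (the hunk itself does not show which module it is, and HAVE_AUTOCOMPLETE is an illustrative name, not from the file):

try:
    import argcomplete  # optional dependency for shell tab completion
    HAVE_AUTOCOMPLETE = True
except ImportError:
    # if autocomplete doesn't exist, just do nothing, don't break
    argcomplete = None
    HAVE_AUTOCOMPLETE = False

Callers can then check the flag (or the None module) before registering completers, so a missing package degrades silently instead of aborting CLI start-up.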
28 | 27 | import ramalama.chat as chat |
29 | 28 | from ramalama import engine |
30 | 29 | from ramalama.arg_types import DefaultArgsType |
31 | | -<<<<<<< HEAD |
32 | | -from ramalama.chat_utils import default_prefix |
33 | | -======= |
34 | 30 | from ramalama.benchmarks.utilities import print_bench_results |
35 | | -from ramalama.chat import default_prefix |
36 | | ->>>>>>> 8a473985 (adds benchmark metrics persistence) |
| 31 | +from ramalama.chat_utils import default_prefix |
37 | 32 | from ramalama.cli_arg_normalization import normalize_pull_arg |
38 | 33 | from ramalama.command.factory import assemble_command |
39 | 34 | from ramalama.common import accel_image, get_accel, perror |
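This hunk resolves leftover merge-conflict markers: the stale default_prefix import from ramalama.chat is dropped, and the ramalama.chat_utils one is kept alongside print_bench_results. As a small standalone illustration of why the cleanup matters (the conflicted snippet is taken verbatim from the diff above), unresolved markers are not valid Python and make the whole module fail at parse time:

import ast

conflicted = """\
<<<<<<< HEAD
from ramalama.chat_utils import default_prefix
=======
from ramalama.chat import default_prefix
>>>>>>> 8a473985 (adds benchmark metrics persistence)
"""

try:
    ast.parse(conflicted)
except SyntaxError as exc:
    # any leftover conflict marker renders the module unimportable
    print(f"SyntaxError on line {exc.lineno}: {exc.msg}")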
@@ -659,13 +654,11 @@ def _list_models_from_store(args): |
659 | 654 | size_sum += file.size |
660 | 655 | last_modified = max(file.modified, last_modified) |
661 | 656 |
662 | | - ret.append( |
663 | | - { |
664 | | - "name": f"{model} (partial)" if is_partially_downloaded else model, |
665 | | - "modified": datetime.fromtimestamp(last_modified, tz=local_timezone).isoformat(), |
666 | | - "size": size_sum, |
667 | | - } |
668 | | - ) |
| 657 | + ret.append({ |
| 658 | + "name": f"{model} (partial)" if is_partially_downloaded else model, |
| 659 | + "modified": datetime.fromtimestamp(last_modified, tz=local_timezone).isoformat(), |
| 660 | + "size": size_sum, |
| 661 | + }) |
669 | 662 |
670 | 663 | # sort the listed models according to the desired order |
671 | 664 | ret.sort(key=lambda entry: entry[args.sort], reverse=args.order == "desc") |
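The second hunk only reflows the ret.append(...) call into a single dict-literal argument; the entry fields and the sort below them are unchanged. For context, here is a self-contained sketch of that listing-and-sort step, with made-up sample entries and with sort_field / order standing in for args.sort / args.order (illustrative values, not from the codebase):

from datetime import datetime, timezone

ret = [
    {"name": "smollm:135m (partial)", "modified": datetime.now(timezone.utc).isoformat(), "size": 92_000_000},
    {"name": "granite-moe:1b", "modified": datetime.now(timezone.utc).isoformat(), "size": 2_100_000_000},
]

# sort the listed models according to the desired order
sort_field, order = "size", "desc"
ret.sort(key=lambda entry: entry[sort_field], reverse=order == "desc")
print([entry["name"] for entry in ret])  # largest model first when order is "desc"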