mirror of https://github.com/containers/ramalama.git
added shared prompt utils
Signed-off-by: Ian Eaves <ian.k.eaves@gmail.com>
@@ -41,6 +41,36 @@ from ramalama.proxy_support import setup_proxy_support
setup_proxy_support()


def res(response, color):
    color_default = ""
    color_yellow = ""
    if (color == "auto" and should_colorize()) or color == "always":
        color_default = "\033[0m"
        color_yellow = "\033[33m"

    print("\r", end="")
    assistant_response = ""
    for line in response:
        line = line.decode("utf-8").strip()
        if line.startswith("data: {"):
            choice = ""

            json_line = json.loads(line[len("data: ") :])
            if "choices" in json_line and json_line["choices"]:
                choice = json_line["choices"][0]["delta"]
                if "content" in choice:
                    choice = choice["content"]
                else:
                    continue

            if choice:
                print(f"{color_yellow}{choice}{color_default}", end="", flush=True)
                assistant_response += choice

    print("")
    return assistant_response


def add_api_key(args, headers=None):
    # static analyzers suggest for dict, this is a safer way of setting
    # a default value, rather than using the parameter directly
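For context, the res() loop above consumes Server-Sent Events from an OpenAI-compatible chat-completions stream and accumulates the "content" deltas. A minimal, self-contained sketch of the same parsing on fabricated sample lines (the payloads below are illustrative, not taken from this commit):

import json

# Hypothetical raw SSE lines, shaped like an OpenAI-compatible
# /v1/chat/completions streaming response: bytes, one "data:" event per line.
stream = [
    b'data: {"choices": [{"delta": {"role": "assistant"}}]}',
    b'data: {"choices": [{"delta": {"content": "Hello"}}]}',
    b'data: {"choices": [{"delta": {"content": ", world"}}]}',
    b"data: [DONE]",
]

assistant_response = ""
for raw in stream:
    line = raw.decode("utf-8").strip()
    if not line.startswith("data: {"):
        continue  # skips "data: [DONE]" and any keep-alive lines
    payload = json.loads(line[len("data: "):])
    delta = payload["choices"][0]["delta"] if payload.get("choices") else {}
    if "content" in delta:
        assistant_response += delta["content"]

print(assistant_response)  # prints "Hello, world"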
@@ -40,8 +40,7 @@ from ramalama.config import (
    get_inference_spec_files,
    load_file_config,
)
from ramalama.config_types import COLOR_OPTIONS
from ramalama.console import EMOJI

from ramalama.endian import EndianMismatchError
from ramalama.log_levels import LogLevel
from ramalama.logger import configure_logger, logger
@@ -82,25 +81,6 @@ def get_shortnames():
    return Shortnames()


def default_prefix():
    # Keep in sync with ramalama.chat.default_prefix to avoid importing chat at startup.
    if not EMOJI:
        return "> "

    config = get_config()
    if config.prefix:
        return config.prefix

    engine = config.engine
    if engine:
        if os.path.basename(engine) == "podman":
            return "\U0001f9ad > "
        if os.path.basename(engine) == "docker":
            return "\U0001f40b > "

    return "\U0001f999 > "


def assemble_command_lazy(cli_args: argparse.Namespace) -> list[str]:
    from ramalama.command.factory import assemble_command
ramalama/prompt_utils.py (new file, 23 lines)
@@ -0,0 +1,23 @@
import os

from ramalama.config import get_config
from ramalama.console import EMOJI


def default_prefix() -> str:
    if not EMOJI:
        return "> "

    config = get_config()
    if config.prefix:
        return config.prefix

    engine = config.engine
    if engine:
        engine_name = os.path.basename(engine)
        if engine_name == "podman":
            return "\U0001f9ad > "
        if engine_name == "docker":
            return "\U0001f40b > "

    return "\U0001f999 > "
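A short usage sketch of the consolidated helper; the import path comes from the new file above, while the read loop itself is illustrative and not part of the commit.

from ramalama.prompt_utils import default_prefix

# One shared implementation replaces the copies the CLI and chat code
# previously kept in sync by hand. With EMOJI support enabled and no
# config.prefix override, the prefix is engine-specific:
#   podman -> "\U0001f9ad > ", docker -> "\U0001f40b > ", otherwise "\U0001f999 > "
prefix = default_prefix()
user_line = input(prefix)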