From 69c6ede6149ad1e6609384ddc9d5cc97b2865400 Mon Sep 17 00:00:00 2001
From: anovazzi1
Date: Tue, 16 Jan 2024 14:01:22 -0300
Subject: [PATCH] Update LlamaCppComponent parameters

---
 src/backend/langflow/components/llms/LlamaCpp.py | 16 +++++++---------
 1 file changed, 7 insertions(+), 9 deletions(-)

diff --git a/src/backend/langflow/components/llms/LlamaCpp.py b/src/backend/langflow/components/llms/LlamaCpp.py
index d8c917ba2..109bb1a7b 100644
--- a/src/backend/langflow/components/llms/LlamaCpp.py
+++ b/src/backend/langflow/components/llms/LlamaCpp.py
@@ -24,7 +24,7 @@ class LlamaCppComponent(CustomComponent):
             "max_tokens": {"display_name": "Max Tokens", "advanced": True},
             "metadata": {"display_name": "Metadata", "advanced": True},
             "model_kwargs": {"display_name": "Model Kwargs", "advanced": True},
-            "model_path": {"display_name": "Model Path"},
+            "model_path": {"display_name": "Model Path","field_type":"file", "file_types":[".bin"],"required":True},
             "n_batch": {"display_name": "N Batch", "advanced": True},
             "n_ctx": {"display_name": "N Ctx", "advanced": True},
             "n_gpu_layers": {"display_name": "N GPU Layers", "advanced": True},
@@ -63,20 +63,20 @@ class LlamaCppComponent(CustomComponent):
         lora_path: Optional[str] = None,
         max_tokens: Optional[int] = 256,
         metadata: Optional[Dict] = None,
-        model_kwargs: Optional[Dict] = None,
+        model_kwargs: Optional[Dict] = {},
         n_batch: Optional[int] = 8,
         n_ctx: Optional[int] = 512,
-        n_gpu_layers: Optional[int] = None,
+        n_gpu_layers: Optional[int] = 1,
         n_parts: Optional[int] = -1,
-        n_threads: Optional[int] = None,
+        n_threads: Optional[int] = 1,
         repeat_penalty: Optional[float] = 1.1,
         rope_freq_base: Optional[float] = 10000.0,
         rope_freq_scale: Optional[float] = 1.0,
         seed: Optional[int] = -1,
-        stop: Optional[List[str]] = None,
+        stop: Optional[List[str]] = [],
         streaming: Optional[bool] = True,
-        suffix: Optional[str] = None,
-        tags: Optional[List[str]] = None,
+        suffix: Optional[str] = "",
+        tags: Optional[List[str]] = [],
         temperature: Optional[float] = 0.8,
         top_k: Optional[int] = 40,
         top_p: Optional[float] = 0.95,
@@ -85,8 +85,6 @@ class LlamaCppComponent(CustomComponent):
         verbose: Optional[bool] = True,
         vocab_only: Optional[bool] = False,
     ) -> LlamaCpp:
-        # Here you would instantiate the LlamaCpp model with the provided parameters
-        # Since the actual implementation of LlamaCpp is not provided, this is a placeholder
         return LlamaCpp(
             model_path=model_path,
             grammar=grammar,