
Commit

[MINOR] Rename from lightrag to adalflow
zjffdu committed Dec 13, 2024
1 parent 7137548 commit 14f85cb
Showing 8 changed files with 26 additions and 26 deletions.
2 changes: 1 addition & 1 deletion adalflow/adalflow/components/output_parsers/outputs.py
@@ -75,7 +75,7 @@ class OutputParser(Component):
This interface helps users customize output parsers with a consistent interface for the Generator,
even though you don't always need to subclass it.
- LightRAG uses two core components:
+ AdalFlow uses two core components:
1. the Prompt to format output instruction
2. A string parser component from core.string_parser for response parsing.
"""
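For orientation, here is a minimal, hypothetical sketch of a custom parser built on this interface. The JsonParser import path is taken from the docs section further below; the format_instructions() and call() method names are assumptions, not confirmed by this diff.

from adalflow.core.string_parser import JsonParser
from adalflow.components.output_parsers.outputs import OutputParser


class SimpleJsonOutputParser(OutputParser):
    """Toy parser: a fixed output instruction plus JSON parsing of the response."""

    def __init__(self):
        super().__init__()
        self._parser = JsonParser()  # string parser from core.string_parser

    def format_instructions(self) -> str:
        # In practice this string would be produced by a Prompt template.
        return "Respond strictly in JSON with keys 'answer' and 'confidence'."

    def call(self, input: str) -> dict:
        # Hand the raw LLM string to the core string parser.
        return self._parser(input)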
4 changes: 2 additions & 2 deletions adalflow/adalflow/core/__init__.py
@@ -3,7 +3,7 @@
from .component import Component, FunComponent, fun_to_component
from .container import Sequential
from .db import LocalDB
- from .default_prompt_template import DEFAULT_LIGHTRAG_SYSTEM_PROMPT
+ from .default_prompt_template import DEFAULT_ADALFLOW_SYSTEM_PROMPT
from .embedder import Embedder, BatchEmbedder
from .generator import Generator, BackwardEngine
from .model_client import ModelClient
@@ -58,7 +58,7 @@
"Generator",
"BackwardEngine",
"Prompt",
"DEFAULT_LIGHTRAG_SYSTEM_PROMPT",
"DEFAULT_ADALFLOW_SYSTEM_PROMPT",
# "Parameter",
"required_field",
"ModelClient",
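A quick sketch of what downstream imports look like after this commit; the module path and names are taken from the diff above.

from adalflow.core import Generator, Prompt, DEFAULT_ADALFLOW_SYSTEM_PROMPT

# The old name is gone, so this would now raise ImportError:
# from adalflow.core import DEFAULT_LIGHTRAG_SYSTEM_PROMPT

print(DEFAULT_ADALFLOW_SYSTEM_PROMPT[:80])  # peek at the renamed default template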
20 changes: 10 additions & 10 deletions adalflow/adalflow/core/default_prompt_template.py
@@ -1,16 +1,16 @@
"""This is the default system prompt template used in the LightRAG.
"""This is the default system prompt template used in the AdalFlow.
Use :ref:`Prompt <core-prompt_builder>` class to manage it.
"""

__all__ = [
"LIGHTRAG_DEFAULT_PROMPT_ARGS",
"LIGHTRAG_DEFAULT_PROMPT_TRAINABLE_PARAMS",
"SIMPLE_DEFAULT_LIGHTRAG_SYSTEM_PROMPT",
"DEFAULT_LIGHTRAG_SYSTEM_PROMPT",
"ADALFLOW_DEFAULT_PROMPT_ARGS",
"ADALFLOW_DEFAULT_PROMPT_TRAINABLE_PARAMS",
"SIMPLE_DEFAULT_ADALFLOW_SYSTEM_PROMPT",
"DEFAULT_ADALFLOW_SYSTEM_PROMPT",
]
# TODO: potentially make a data class for this
- LIGHTRAG_DEFAULT_PROMPT_ARGS = [
+ ADALFLOW_DEFAULT_PROMPT_ARGS = [
"task_desc_str", # task description
"output_format_str", # output format of the task
"tools_str", # tools used in the task
@@ -21,17 +21,17 @@
"input_str", # user query or input
]

- LIGHTRAG_DEFAULT_PROMPT_TRAINABLE_PARAMS = [
+ ADALFLOW_DEFAULT_PROMPT_TRAINABLE_PARAMS = [
"task_desc_str",
# "output_format_str",
"examples_str",
]

- SIMPLE_DEFAULT_LIGHTRAG_SYSTEM_PROMPT = r"""<SYS>{{task_desc_str}}</SYS>
+ SIMPLE_DEFAULT_ADALFLOW_SYSTEM_PROMPT = r"""<SYS>{{task_desc_str}}</SYS>
User: {{input_str}}
You:"""

- DEFAULT_LIGHTRAG_SYSTEM_PROMPT = r"""<START_OF_SYSTEM_PROMPT>
+ DEFAULT_ADALFLOW_SYSTEM_PROMPT = r"""<START_OF_SYSTEM_PROMPT>
{# task desc #}
{% if task_desc_str %}
{{task_desc_str}}
@@ -87,7 +87,7 @@
<END_OF_ASSISTANT_STEPS>
{% endif %}
"""
"""This is the default system prompt template used in the LightRAG.
"""This is the default system prompt template used in the AdalFlow.
Use :ref:`Prompt <core-prompt_builder>` class to manage it.
"""
4 changes: 2 additions & 2 deletions adalflow/adalflow/core/generator.py
@@ -26,7 +26,7 @@
from adalflow.core.prompt_builder import Prompt
from adalflow.core.functional import compose_model_kwargs
from adalflow.core.model_client import ModelClient
- from adalflow.core.default_prompt_template import DEFAULT_LIGHTRAG_SYSTEM_PROMPT
+ from adalflow.core.default_prompt_template import DEFAULT_ADALFLOW_SYSTEM_PROMPT
from adalflow.optim.function import BackwardContext
from adalflow.utils.cache import CachedEngine
from adalflow.tracing.callback_manager import CallbackManager
@@ -113,7 +113,7 @@ def __init__(
Got {model_client} instead."
)

- template = template or DEFAULT_LIGHTRAG_SYSTEM_PROMPT
+ template = template or DEFAULT_ADALFLOW_SYSTEM_PROMPT

# create the cache path and initialize the cache engine

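A sketch of the fallback changed above: when no template is passed, the Generator now uses DEFAULT_ADALFLOW_SYSTEM_PROMPT. The OpenAIClient import and the model name are illustrative assumptions (they require an OPENAI_API_KEY) and are not part of this diff.

from adalflow.core import Generator
from adalflow.components.model_client import OpenAIClient  # assumed client; any ModelClient works

generator = Generator(
    model_client=OpenAIClient(),
    model_kwargs={"model": "gpt-3.5-turbo"},
    # template omitted on purpose -> falls back to DEFAULT_ADALFLOW_SYSTEM_PROMPT
)
output = generator(prompt_kwargs={"input_str": "What does the rename change for users?"})
print(output.data)  # GeneratorOutput.data holds the parsed response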
6 changes: 3 additions & 3 deletions adalflow/adalflow/core/model_client.py
@@ -25,15 +25,15 @@ class ModelClient(Component):
(1) Initialize the client, including both sync and async.
- (2) Convert the standard LightRAG components inputs to the API-specific format.
+ (2) Convert the standard AdalFlow components inputs to the API-specific format.
(3) Call the API and parse the response.
(4) Handle API specific exceptions and errors to retry the call.
Check the subclasses in `components/model_client/` directory for the functional API clients we have.
- This interface is designed to bridge the gap between LightRAG components inputs and model APIs.
+ This interface is designed to bridge the gap between AdalFlow components inputs and model APIs.
You can see examples of the subclasses in components/model_client/ directory.
"""
@@ -103,7 +103,7 @@ def track_completion_usage(self, *args, **kwargs) -> "CompletionUsage":
)

def parse_embedding_response(self, response: Any) -> "EmbedderOutput":
r"""Parse the embedding response to a structure LightRAG components can understand."""
r"""Parse the embedding response to a structure AdalFlow components can understand."""
raise NotImplementedError(
f"{type(self).__name__} must implement parse_embedding_response method"
)
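For context, a bare-bones, hypothetical subclass covering the numbered responsibilities in the docstring above. Only parse_embedding_response appears in this diff; convert_inputs_to_api_kwargs, the call hook, and the EmbedderOutput field names are assumptions.

from typing import Any, Dict, Optional

from adalflow.core.model_client import ModelClient
from adalflow.core.types import EmbedderOutput  # core.types is referenced in the docs sections below


class EchoClient(ModelClient):
    """Toy client: no real API, just placeholders for each responsibility."""

    def __init__(self):
        super().__init__()
        self.sync_client = None  # (1) a real subclass builds its SDK client(s) here

    def convert_inputs_to_api_kwargs(
        self, input: Any = None, model_kwargs: Optional[Dict] = None, model_type: Any = None
    ) -> Dict:
        # (2) map standard AdalFlow inputs onto the provider's request format
        return {"input": input, **(model_kwargs or {})}

    def call(self, api_kwargs: Optional[Dict] = None, model_type: Any = None) -> Any:
        # (3) call the API; (4) real clients wrap this in retry/backoff handling
        return api_kwargs or {}

    def parse_embedding_response(self, response: Any) -> EmbedderOutput:
        # Turn the raw payload into a structure AdalFlow components understand.
        return EmbedderOutput(data=[], error=None, raw_response=response)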
4 changes: 2 additions & 2 deletions adalflow/adalflow/core/prompt_builder.py
@@ -8,7 +8,7 @@


from adalflow.core.component import Component
- from adalflow.core.default_prompt_template import DEFAULT_LIGHTRAG_SYSTEM_PROMPT
+ from adalflow.core.default_prompt_template import DEFAULT_ADALFLOW_SYSTEM_PROMPT
from adalflow.optim.parameter import Parameter


@@ -56,7 +56,7 @@ def __init__(
):
super().__init__()

- self.template = template or DEFAULT_LIGHTRAG_SYSTEM_PROMPT
+ self.template = template or DEFAULT_ADALFLOW_SYSTEM_PROMPT
self.__create_jinja2_template()
self.prompt_variables: List[str] = []
for var in self._find_template_variables(self.template):
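A short sketch of the fallback changed above: constructing Prompt() with no template picks up DEFAULT_ADALFLOW_SYSTEM_PROMPT and, per the __init__ shown here, records the template variables it finds. It assumes both constructor arguments have defaults.

from adalflow.core.prompt_builder import Prompt

p = Prompt()  # template=None -> falls back to DEFAULT_ADALFLOW_SYSTEM_PROMPT
# Populated by _find_template_variables() in __init__ above; expected to list
# placeholders such as task_desc_str and input_str from the default template.
print(p.prompt_variables)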
4 changes: 2 additions & 2 deletions docs/source/tutorials/generator.rst
@@ -55,7 +55,7 @@ An Orchestrator
It orchestrates three components:

- `Prompt`: by taking in ``template`` (string) and ``prompt_kwargs`` (dict) to format the prompt at initialization.
- When the ``template`` is not provided, it defaults to :const:`DEFAULT_LIGHTRAG_SYSTEM_PROMPT<core.default_prompt_template.DEFAULT_LIGHTRAG_SYSTEM_PROMPT>`.
+ When the ``template`` is not provided, it defaults to :const:`DEFAULT_ADALFLOW_SYSTEM_PROMPT<core.default_prompt_template.DEFAULT_ADALFLOW_SYSTEM_PROMPT>`.

- `ModelClient`: by taking in an already instantiated ``model_client`` and ``model_kwargs`` to call the model.
Switching out the model client allows you to call any LLM model using the same prompt and output parsing.
@@ -485,7 +485,7 @@ It will require users to define ``Parameter`` and pass it to the ``prompt_kwargs``

- :class:`core.generator.Generator`
- :class:`core.types.GeneratorOutput`
- - :class:`core.default_prompt_template.DEFAULT_LIGHTRAG_SYSTEM_PROMPT`
+ - :class:`core.default_prompt_template.DEFAULT_ADALFLOW_SYSTEM_PROMPT`
- :class:`core.types.ModelClientType`
- :class:`core.types.ModelType`
- :class:`core.string_parser.JsonParser`
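As the tutorial text above notes, switching the model client reuses the same prompt and output parsing. A hedged sketch follows; the client classes and model names are assumptions for illustration and need the corresponding API keys.

from adalflow.core import Generator
from adalflow.components.model_client import OpenAIClient, GroqAPIClient  # assumed clients

template = r"<SYS>{{task_desc_str}}</SYS> User: {{input_str}} You:"
prompt_kwargs = {"task_desc_str": "Answer in one sentence.", "input_str": "What is a Generator?"}

for client, model_kwargs in [
    (OpenAIClient(), {"model": "gpt-3.5-turbo"}),
    (GroqAPIClient(), {"model": "llama3-8b-8192"}),
]:
    # Same template and prompt_kwargs, different backend.
    gen = Generator(model_client=client, model_kwargs=model_kwargs, template=template)
    print(gen(prompt_kwargs=prompt_kwargs))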
8 changes: 4 additions & 4 deletions docs/source/tutorials/prompt.rst
@@ -6,7 +6,7 @@
<a href="https://colab.research.google.com/drive/1_sGeHaKrwpI9RiL01g3cKyI2_5PJqZtr?usp=sharing" target="_blank" style="margin-right: 10px;">
<img alt="Try Quickstart in Colab" src="https://colab.research.google.com/assets/colab-badge.svg" style="vertical-align: middle;">
</a>
<a href="https://github.com/SylphAI-Inc/LightRAG/blob/main/tutorials/prompt_note.py" target="_blank" style="display: flex; align-items: center;">
<a href="https://github.com/SylphAI-Inc/AdalFlow/blob/main/tutorials/prompt_note.py" target="_blank" style="display: flex; align-items: center;">
<img src="https://github.githubassets.com/images/modules/logos_page/GitHub-Mark.png" alt="GitHub" style="height: 20px; width: 20px; margin-right: 5px;">
<span style="vertical-align: middle;"> Open Source Code</span>
</a>
@@ -206,13 +206,13 @@ As with all components, you can use ``to_dict`` and ``from_dict`` to serialize a
Default Prompt Template
-------------------------

- By default, the ``Prompt`` class uses the :const:`DEFAULT_LIGHTRAG_SYSTEM_PROMPT<core.default_prompt_template.DEFAULT_LIGHTRAG_SYSTEM_PROMPT>` as its string template if no template is provided.
+ By default, the ``Prompt`` class uses the :const:`DEFAULT_ADALFLOW_SYSTEM_PROMPT<core.default_prompt_template.DEFAULT_ADALFLOW_SYSTEM_PROMPT>` as its string template if no template is provided.
This default template allows you to conditionally pass seven important variables derived from the data flow diagram above.
These variables are:

.. code-block:: python
- LIGHTRAG_DEFAULT_PROMPT_ARGS = [
+ ADALFLOW_DEFAULT_PROMPT_ARGS = [
"task_desc_str", # task description
"output_format_str", # output format of the task
"tools_str", # tools used in the task
@@ -266,4 +266,4 @@ The output will be the bare minimum with only the user query and a prefix for as
:class: highlight

- :class:`core.prompt_builder.Prompt`
- - :const:`core.default_prompt_template.DEFAULT_LIGHTRAG_SYSTEM_PROMPT`
+ - :const:`core.default_prompt_template.DEFAULT_ADALFLOW_SYSTEM_PROMPT`
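To illustrate the "bare minimum" rendering mentioned in the last hunk above, a small sketch; the exact markup depends on the conditional blocks of DEFAULT_ADALFLOW_SYSTEM_PROMPT, and the call signature is assumed.

from adalflow.core.prompt_builder import Prompt

prompt = Prompt()  # uses DEFAULT_ADALFLOW_SYSTEM_PROMPT by default
# Only input_str is provided, so the template's other {% if %} blocks are skipped and
# the rendered string should contain just the user query plus the assistant prefix.
print(prompt(input_str="What is the capital of France?"))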
