diff --git a/docs/library.mdx b/docs/library.mdx index 228c6d95f..c77a338b7 100644 --- a/docs/library.mdx +++ b/docs/library.mdx @@ -211,7 +211,6 @@ To customize PandasAI's `SmartDataframe`, you can either pass a `config` object Settings: - `llm`: the LLM to use. You can pass an instance of an LLM or the name of an LLM. You can use one of the LLMs supported. You can find more information about LLMs [here](/llms) -- `llm_options`: the options to use for the LLM (for example the api token, etc). You can find more information about the settings [here](/llms). - `save_logs`: whether to save the logs of the LLM. Defaults to `True`. You will find the logs in the `pandasai.log` file in the root of your project. - `verbose`: whether to print the logs in the console as PandasAI is executed. Defaults to `False`. - `enforce_privacy`: whether to enforce privacy. Defaults to `False`. If set to `True`, PandasAI will not send any data to the LLM, but only the metadata. By default, PandasAI will send 5 samples that are anonymized to improve the accuracy of the results. diff --git a/docs/llms.mdx b/docs/llms.mdx index 98897dda4..5268aa0c2 100644 --- a/docs/llms.mdx +++ b/docs/llms.mdx @@ -7,38 +7,7 @@ The generated code is then executed to produce the result. [![Choose the LLM](https://cdn.loom.com/sessions/thumbnails/5496c9c07ee04f69bfef1bc2359cd591-00001.jpg)](https://www.loom.com/share/5496c9c07ee04f69bfef1bc2359cd591 "Choose the LLM") -You can either choose a LLM by either instantiating it and passing it to the `SmartDataFrame` or `SmartDatalake` constructor, -or by specifying it in the `pandasai.json` configuration file. 
- -If the model expects one or more parameters, you can pass them to the constructor or specify them in the `pandasai.json` -file, in the `llm_options` parameters, Here’s an example of how to structure your `pandasai.json` file: - -```json -{ - "llm": "BambooLLM", - "llm_options": { - "api_key": "API_KEY_GOES_HERE" - } -} -``` -> **Note:** -> `pandasai.json` can be configure for any LLM. - -## Working with pandasai.json file - -In this example, `data.csv` is your data file, and pandasai.json is the configuration file. Make sure the configuration file is named `pandasai.json` and is in the same folder as your code. - -```python -from pandasai import SmartDataframe -from pandasai.config import load_config_from_json - -# Load configuration from pandasai.json -config = load_config_from_json() - -df = SmartDataframe("data.csv", config=config) -response = df.chat("give me revenue of Top 5 companies for year 2021") -print(response) -``` +You can instantiate the LLM by passing it in the config to the SmartDataFrame or SmartDatalake constructor. 
## BambooLLM diff --git a/pandasai.json b/pandasai.json index 9216a2434..26632c58e 100644 --- a/pandasai.json +++ b/pandasai.json @@ -8,7 +8,5 @@ "open_charts": true, "save_charts": false, "save_charts_path": "exports/charts", - "custom_whitelisted_dependencies": [], - "llm": "BambooLLM", - "llm_options": null + "custom_whitelisted_dependencies": [] } diff --git a/pandasai/config.py b/pandasai/config.py index 6126819ca..e5470deb2 100644 --- a/pandasai/config.py +++ b/pandasai/config.py @@ -32,12 +32,9 @@ def load_config_from_json( with open(find_closest("pandasai.json"), "r") as f: config = json.load(f) - # if config is a dict - if config.get("llm") and not override_config.get("llm"): - options = config.get("llm_options") or {} - config["llm"] = getattr(llm, config["llm"])(**options) - elif not config.get("llm") and not override_config.get("llm"): + if not config.get("llm") and not override_config.get("llm"): config["llm"] = llm.BambooLLM() + except FileNotFoundError: # Ignore the error if the file does not exist, will use the default config pass diff --git a/pandasai/llm/__init__.py b/pandasai/llm/__init__.py index b377b2e97..891a7cb8c 100644 --- a/pandasai/llm/__init__.py +++ b/pandasai/llm/__init__.py @@ -8,6 +8,7 @@ from .huggingface_text_gen import HuggingFaceTextGen from .ibm_watsonx import IBMwatsonx from .langchain import LangchainLLM +from .local_llm import LocalLLM from .openai import OpenAI __all__ = [ @@ -22,4 +23,5 @@ "LangchainLLM", "BedrockClaude", "IBMwatsonx", + "LocalLLM", ]