From db5974803d7224566f5ff1df11506e65678509c2 Mon Sep 17 00:00:00 2001
From: John Agapiou
Date: Fri, 9 Aug 2024 04:29:17 -0700
Subject: [PATCH] remove unnecessary dependencies

PiperOrigin-RevId: 661214283
Change-Id: I785210ae245f4c2f5c618c44dd09f51a8e9f03ff
---
 examples/modular/launch.py              | 53 ++++++++-----------------
 examples/modular/notebook.ipynb         | 53 ++++++++-----------------
 examples/requirements.txt               |  2 -
 examples/tutorials/agent_tutorial.ipynb |  1 -
 setup.py                                |  2 -
 5 files changed, 34 insertions(+), 77 deletions(-)

diff --git a/examples/modular/launch.py b/examples/modular/launch.py
index 57d429ec..401940f4 100644
--- a/examples/modular/launch.py
+++ b/examples/modular/launch.py
@@ -43,7 +43,6 @@
 import argparse
 import datetime
 import importlib
-import os
 import pathlib
 import sys
 
@@ -56,47 +55,29 @@
 from concordia.language_model import ollama_model
 from concordia.language_model import pytorch_gemma_model
 from concordia.utils import measurements as measurements_lib
-import openai
 import sentence_transformers
 
 
 def language_model_setup(args):
   """Get the wrapped language model."""
-  if not args.disable_language_model:
-    # By default this script uses GPT-4, so you must provide an API key.
-    # Note that it is also possible to use local models or other API models,
-    # simply replace the following with the correct initialization for the model
-    # you want to use.
-    if args.api_type == 'amazon_bedrock':
-      return amazon_bedrock_model.AmazonBedrockLanguageModel(
-          model_name=args.model_name)
-    elif args.api_type == 'google_aistudio_model':
-      return google_aistudio_model.GoogleAIStudioLanguageModel(
-          model_name=args.model_name)
-    elif args.api_type == 'langchain_ollama':
-      return langchain_ollama_model.LangchainOllamaLanguageModel(
-          model_name=args.model_name)
-    elif args.api_type == 'mistral':
-      mistral_api_key = os.environ['MISTRAL_API_KEY']
-      if not mistral_api_key:
-        raise ValueError('Mistral api_key is required.')
-      return mistral_model.MistralLanguageModel(api_key=mistral_api_key,
-                                                model_name=args.model_name)
-    elif args.api_type == 'ollama':
-      return ollama_model.OllamaLanguageModel(model_name=args.model_name)
-    elif args.api_type == 'openai':
-      openai.api_key = os.environ['OPENAI_API_KEY']
-      if not openai.api_key:
-        raise ValueError('OpenAI api_key is required.')
-      return gpt_model.GptLanguageModel(api_key=openai.api_key,
-                                        model_name=args.model_name)
-    elif args.api_type == 'pytorch_gemma':
-      return pytorch_gemma_model.PyTorchGemmaLanguageModel(
-          model_name=args.model_name)
-    else:
-      raise ValueError(f'Unrecognized api type: {args.api_type}')
-  else:
+  if args.disable_language_model:
     return no_language_model.NoLanguageModel()
+  elif args.api_type == 'amazon_bedrock':
+    return amazon_bedrock_model.AmazonBedrockLanguageModel(args.model_name)
+  elif args.api_type == 'google_aistudio_model':
+    return google_aistudio_model.GoogleAIStudioLanguageModel(args.model_name)
+  elif args.api_type == 'langchain_ollama':
+    return langchain_ollama_model.LangchainOllamaLanguageModel(args.model_name)
+  elif args.api_type == 'mistral':
+    return mistral_model.MistralLanguageModel(args.model_name)
+  elif args.api_type == 'ollama':
+    return ollama_model.OllamaLanguageModel(args.model_name)
+  elif args.api_type == 'openai':
+    return gpt_model.GptLanguageModel(args.model_name)
+  elif args.api_type == 'pytorch_gemma':
+    return pytorch_gemma_model.PyTorchGemmaLanguageModel(args.model_name)
+  else:
+    raise ValueError(f'Unrecognized api type: {args.api_type}')
 
 
 # Setup for command line arguments
diff --git a/examples/modular/notebook.ipynb b/examples/modular/notebook.ipynb
index b3b98dae..c745debd 100644
--- a/examples/modular/notebook.ipynb
+++ b/examples/modular/notebook.ipynb
@@ -81,8 +81,6 @@
     "from concordia.language_model import ollama_model\n",
     "from concordia.language_model import pytorch_gemma_model\n",
     "from concordia.utils import measurements as measurements_lib\n",
-    "\n",
-    "import openai\n",
     "import sentence_transformers"
    ]
   },
@@ -170,41 +168,24 @@
    "source": [
     "# @title Language Model setup\n",
     "\n",
-    "if not DISABLE_LANGUAGE_MODEL:\n",
-    "  # By default this script uses GPT-4, so you must provide an API key.\n",
-    "  # Note that it is also possible to use local models or other API models,\n",
-    "  # simply replace the following with the correct initialization for the model\n",
-    "  # you want to use.\n",
-    "  if API_TYPE == 'amazon_bedrock':\n",
-    "    model = amazon_bedrock_model.AmazonBedrockLanguageModel(\n",
-    "        model_name=MODEL_NAME)\n",
-    "  elif API_TYPE == 'google_aistudio_model':\n",
-    "    model = google_aistudio_model.GoogleAIStudioLanguageModel(\n",
-    "        model_name=MODEL_NAME)\n",
-    "  elif API_TYPE == 'langchain_ollama':\n",
-    "    model = langchain_ollama_model.LangchainOllamaLanguageModel(\n",
-    "        model_name=MODEL_NAME)\n",
-    "  elif API_TYPE == 'mistral':\n",
-    "    mistral_api_key = os.environ['MISTRAL_API_KEY']\n",
-    "    if not mistral_api_key:\n",
-    "      raise ValueError('Mistral api_key is required.')\n",
-    "    model = mistral_model.MistralLanguageModel(api_key=mistral_api_key,\n",
-    "                                               model_name=MODEL_NAME)\n",
-    "  elif API_TYPE == 'ollama':\n",
-    "    model = ollama_model.OllamaLanguageModel(model_name=MODEL_NAME)\n",
-    "  elif API_TYPE == 'openai':\n",
-    "    openai.api_key = os.environ['OPENAI_API_KEY']\n",
-    "    if not openai.api_key:\n",
-    "      raise ValueError('OpenAI api_key is required.')\n",
-    "    model = gpt_model.GptLanguageModel(api_key=openai.api_key,\n",
-    "                                       model_name=MODEL_NAME)\n",
-    "  elif API_TYPE == 'pytorch_gemma':\n",
-    "    model = pytorch_gemma_model.PyTorchGemmaLanguageModel(\n",
-    "        model_name=MODEL_NAME)\n",
-    "  else:\n",
-    "    raise ValueError(f'Unrecognized api type: {API_TYPE}')\n",
+    "if DISABLE_LANGUAGE_MODEL:\n",
+    "  model = no_language_model.NoLanguageModel()\n",
+    "elif API_TYPE == 'amazon_bedrock':\n",
+    "  model = amazon_bedrock_model.AmazonBedrockLanguageModel(MODEL_NAME)\n",
+    "elif API_TYPE == 'google_aistudio_model':\n",
+    "  model = google_aistudio_model.GoogleAIStudioLanguageModel(MODEL_NAME)\n",
+    "elif API_TYPE == 'langchain_ollama':\n",
+    "  model = langchain_ollama_model.LangchainOllamaLanguageModel(MODEL_NAME)\n",
+    "elif API_TYPE == 'mistral':\n",
+    "  model = mistral_model.MistralLanguageModel(MODEL_NAME)\n",
+    "elif API_TYPE == 'ollama':\n",
+    "  model = ollama_model.OllamaLanguageModel(MODEL_NAME)\n",
+    "elif API_TYPE == 'openai':\n",
+    "  model = gpt_model.GptLanguageModel(MODEL_NAME)\n",
+    "elif API_TYPE == 'pytorch_gemma':\n",
+    "  model = pytorch_gemma_model.PyTorchGemmaLanguageModel(MODEL_NAME)\n",
     "else:\n",
-    "  model = no_language_model.NoLanguageModel()"
+    "  raise ValueError(f'Unrecognized api type: {API_TYPE}')"
    ]
   },
  {
diff --git a/examples/requirements.txt b/examples/requirements.txt
index 0320ad01..6bd58b4b 100644
--- a/examples/requirements.txt
+++ b/examples/requirements.txt
@@ -5,7 +5,5 @@ IPython
 matplotlib
 ml_collections
 numpy
-openai
-pandas
 sentence_transformers
 termcolor
diff --git a/examples/tutorials/agent_tutorial.ipynb b/examples/tutorials/agent_tutorial.ipynb
index 83c5bcc5..b39d5112 100644
--- a/examples/tutorials/agent_tutorial.ipynb
+++ b/examples/tutorials/agent_tutorial.ipynb
@@ -73,7 +73,6 @@
     "import random\n",
     "\n",
     "import numpy as np\n",
-    "import pandas as pd\n",
     "import sentence_transformers\n",
     "\n",
     "from IPython import display\n",
diff --git a/setup.py b/setup.py
index 01faf7fd..4aadc6d8 100644
--- a/setup.py
+++ b/setup.py
@@ -67,7 +67,6 @@ def _remove_excluded(description: str) -> str:
     package_data={},
     python_requires='>=3.11',
     install_requires=(
-        # TODO: b/312199199 - remove some requirements.
         'absl-py',
         'boto3',
         'google-cloud-aiplatform',
@@ -83,7 +82,6 @@ def _remove_excluded(description: str) -> str:
         'python-dateutil',
         'reactivex',
         'retry',
-        'scipy',
         'termcolor',
         'transformers',
         'typing-extensions',
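
Note (not part of the patch): with the explicit os.environ['OPENAI_API_KEY'] and os.environ['MISTRAL_API_KEY'] checks removed, launch.py and the notebook now construct the OpenAI and Mistral wrappers from the model name alone, so credentials are presumably resolved inside the concordia.language_model wrappers (for example from the environment) rather than in the example scripts. The sketch below is illustrative only: it copies just the offline branch of the refactored dispatch and drives it with an argparse.Namespace whose attribute names (api_type, model_name, disable_language_model) are taken from the hunks above.

import argparse

from concordia.language_model import no_language_model


def language_model_setup(args):
  """Trimmed copy of the post-patch dispatch (offline branch only)."""
  if args.disable_language_model:
    return no_language_model.NoLanguageModel()
  raise ValueError(f'Unrecognized api type: {args.api_type}')


args = argparse.Namespace(
    api_type='openai',            # ignored when the language model is disabled
    model_name='gpt-4o',          # hypothetical model name
    disable_language_model=True,  # short-circuits to NoLanguageModel()
)
model = language_model_setup(args)
print(type(model).__name__)  # -> NoLanguageModel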