From 10f4e87283e68ac59fa7f0728fa96918b36648ea Mon Sep 17 00:00:00 2001
From: Li Yin
Date: Mon, 9 Sep 2024 18:26:50 -0700
Subject: [PATCH 01/24] add evaluation
---
README.md | 1 +
1 file changed, 1 insertion(+)
diff --git a/README.md b/README.md
index 7d9eb63a..cbf15396 100644
--- a/README.md
+++ b/README.md
@@ -29,6 +29,7 @@
Models |
Retrievers |
Agents |
+ LLM evaluation |
Trainer & Optimizers
From ce682e2161fbe6c7ea477e0640a1ef5a0acf0010 Mon Sep 17 00:00:00 2001
From: ajithvcoder
Date: Wed, 11 Sep 2024 07:10:26 +0530
Subject: [PATCH 02/24] refactor: update generator usage notebook with adalflow
keyword change and fix import issues
---
use_cases/generator/basic.ipynb | 210 ++++++++++++++++++--------------
1 file changed, 120 insertions(+), 90 deletions(-)
diff --git a/use_cases/generator/basic.ipynb b/use_cases/generator/basic.ipynb
index 8c2385ac..117f3919 100644
--- a/use_cases/generator/basic.ipynb
+++ b/use_cases/generator/basic.ipynb
@@ -1,5 +1,12 @@
{
"cells": [
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Basic Generator Usage"
+ ]
+ },
{
"cell_type": "markdown",
"metadata": {},
@@ -17,6 +24,7 @@
"source": [
"In default, the generator uses a default prompt template. It has these varaibles:\n",
"\n",
+ "```\n",
"LIGHTRAG_DEFAULT_PROMPT_ARGS = [\n",
" \"task_desc_str\",\n",
" \"output_format_str\",\n",
@@ -27,17 +35,30 @@
" \"steps_str\",\n",
" \"input_str\",\n",
" \"output_str\",\n",
- "]"
+ "]\n",
+ "```"
]
},
{
"cell_type": "code",
"execution_count": 1,
"metadata": {},
- "outputs": [],
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ ""
+ ]
+ },
+ "execution_count": 1,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
"source": [
"# first, let's set up the library log just in case, in default at INFO level\n",
- "from utils.logger import get_logger\n",
+ "from adalflow.utils.logger import get_logger\n",
+ "\n",
"get_logger()"
]
},
@@ -50,25 +71,23 @@
"name": "stdout",
"output_type": "stream",
"text": [
- "2024-06-09 22:06:49 - INFO - [prompt_builder.py:82:__init__] - Prompt has variables: ['chat_history_str', 'task_desc_str', 'steps_str', 'examples_str', 'tools_str', 'context_str', 'output_str', 'output_format_str', 'input_str']\n",
- "2024-06-09 22:06:49 - INFO - [generator.py:194:call] - prompt_kwargs: {'input_str': 'What is the capital of France?'}\n",
- "2024-06-09 22:06:49 - INFO - [generator.py:195:call] - model_kwargs: {}\n",
- "2024-06-09 22:06:49 - INFO - [openai_client.py:122:call] - api_kwargs: {'model': 'gpt-3.5-turbo', 'messages': [{'role': 'system', 'content': '\\nWhat is the capital of France?\\n'}]}\n",
- "2024-06-09 22:06:49 - INFO - [_client.py:1026:_send_single_request] - HTTP Request: POST https://api.openai.com/v1/chat/completions \"HTTP/1.1 200 OK\"\n",
- "2024-06-09 22:06:49 - INFO - [generator.py:203:call] - output: GeneratorOutput(data='The capital of France is Paris.', error=None, raw_response='The capital of France is Paris.')\n",
- "GeneratorOutput(data='The capital of France is Paris.', error=None, raw_response='The capital of France is Paris.')\n"
+ "2024-09-11 06:28:35 - prompt_builder - INFO - [prompt_builder.py:65:__init__] - Prompt has variables: ['steps_str', 'context_str', 'output_format_str', 'input_format_str', 'examples_str', 'chat_history_str', 'task_desc_str', 'tools_str', 'input_str']\n",
+ "2024-09-11 06:28:35 - generator - INFO - [generator.py:141:__init__] - Generator Generator initialized.\n",
+ "2024-09-11 06:28:35 - openai_client - INFO - [openai_client.py:279:call] - api_kwargs: {'model': 'gpt-3.5-turbo', 'messages': [{'role': 'system', 'content': '\\nYou are a helpful assistant.\\n\\n\\nWhat is the capital of France?\\n'}]}\n",
+ "2024-09-11 06:28:36 - _client - INFO - [_client.py:1038:_send_single_request] - HTTP Request: POST https://api.openai.com/v1/chat/completions \"HTTP/1.1 200 OK\"\n",
+ "2024-09-11 06:28:36 - generator - INFO - [generator.py:773:call] - output: GeneratorOutput(id=None, data='The capital of France is Paris.', error=None, usage=CompletionUsage(completion_tokens=7, prompt_tokens=48, total_tokens=55), raw_response='The capital of France is Paris.', metadata=None)\n",
+ "GeneratorOutput(id=None, data='The capital of France is Paris.', error=None, usage=CompletionUsage(completion_tokens=7, prompt_tokens=48, total_tokens=55), raw_response='The capital of France is Paris.', metadata=None)\n"
]
}
],
"source": [
"from adalflow.core import Generator\n",
"from adalflow.components.model_client import OpenAIClient\n",
- "from adalflow.utils import setup_env # ensure you have .env with OPENAI_API_KEY\n",
+ "from adalflow.utils import setup_env # ensure you have .env with OPENAI_API_KEY\n",
"\n",
+ "setup_env(\".env\")\n",
"query = \"What is the capital of France?\"\n",
- "model_kwargs = {\n",
- " \"model\": \"gpt-3.5-turbo\"\n",
- "}\n",
+ "model_kwargs = {\"model\": \"gpt-3.5-turbo\"}\n",
"generator = Generator(model_client=OpenAIClient(), model_kwargs=model_kwargs)\n",
"prompt_kwargs = {\n",
" \"input_str\": query,\n",
@@ -95,12 +114,25 @@
"output_type": "stream",
"text": [
"Prompt:\n",
- "\n",
- "\n",
+ "______________________\n",
+ "\n",
+ "You are a helpful assistant.\n",
+ "\n",
+ "\n",
"What is the capital of France?\n",
- "\n",
+ "\n",
"\n"
]
+ },
+ {
+ "data": {
+ "text/plain": [
+ "'\\nYou are a helpful assistant.\\n\\n\\nWhat is the capital of France?\\n\\n'"
+ ]
+ },
+ "execution_count": 3,
+ "metadata": {},
+ "output_type": "execute_result"
}
],
"source": [
@@ -117,61 +149,57 @@
},
{
"cell_type": "code",
- "execution_count": 7,
+ "execution_count": 4,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
- "2024-06-09 22:09:43 - INFO - [prompt_builder.py:82:__init__] - Prompt has variables: ['input_str']\n",
- "2024-06-09 22:09:43 - INFO - [generator.py:194:call] - prompt_kwargs: {'input_str': 'What is the capital of France?'}\n",
- "2024-06-09 22:09:43 - INFO - [generator.py:195:call] - model_kwargs: {}\n"
- ]
- },
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "2024-06-09 22:09:43 - INFO - [openai_client.py:122:call] - api_kwargs: {'model': 'gpt-3.5-turbo', 'messages': [{'role': 'system', 'content': ' Your are an assistant with a great sense of humor. User: What is the capital of France?. You:'}]}\n",
- "2024-06-09 22:09:44 - INFO - [_client.py:1026:_send_single_request] - HTTP Request: POST https://api.openai.com/v1/chat/completions \"HTTP/1.1 200 OK\"\n",
- "2024-06-09 22:09:44 - INFO - [generator.py:203:call] - output: GeneratorOutput(data='The capital of France is Paris. It\\'s the city of love, pastries, and baguettes, so pack your beret and get ready to say \"ooh la la!\"', error=None, raw_response='The capital of France is Paris. It\\'s the city of love, pastries, and baguettes, so pack your beret and get ready to say \"ooh la la!\"')\n",
- "GeneratorOutput(data='The capital of France is Paris. It\\'s the city of love, pastries, and baguettes, so pack your beret and get ready to say \"ooh la la!\"', error=None, raw_response='The capital of France is Paris. It\\'s the city of love, pastries, and baguettes, so pack your beret and get ready to say \"ooh la la!\"')\n"
+ "2024-09-11 06:29:25 - prompt_builder - INFO - [prompt_builder.py:65:__init__] - Prompt has variables: ['input_str']\n",
+ "2024-09-11 06:29:25 - generator - INFO - [generator.py:141:__init__] - Generator Generator initialized.\n",
+ "2024-09-11 06:29:25 - openai_client - INFO - [openai_client.py:279:call] - api_kwargs: {'model': 'gpt-3.5-turbo', 'messages': [{'role': 'system', 'content': ' Your are an assistant with a great sense of humor. User: What is the capital of France?. You:'}]}\n",
+ "2024-09-11 06:29:26 - _client - INFO - [_client.py:1038:_send_single_request] - HTTP Request: POST https://api.openai.com/v1/chat/completions \"HTTP/1.1 200 OK\"\n",
+ "2024-09-11 06:29:26 - generator - INFO - [generator.py:773:call] - output: GeneratorOutput(id=None, data='I\\'m not sure, but I\\'ve heard it\\'s pronounced \"Paris\". ', error=None, usage=CompletionUsage(completion_tokens=16, prompt_tokens=34, total_tokens=50), raw_response='I\\'m not sure, but I\\'ve heard it\\'s pronounced \"Paris\". ', metadata=None)\n",
+ "GeneratorOutput(id=None, data='I\\'m not sure, but I\\'ve heard it\\'s pronounced \"Paris\". ', error=None, usage=CompletionUsage(completion_tokens=16, prompt_tokens=34, total_tokens=50), raw_response='I\\'m not sure, but I\\'ve heard it\\'s pronounced \"Paris\". ', metadata=None)\n"
]
}
],
"source": [
"template = \"\"\" Your are an assistant with a great sense of humor. User: {{input_str}}. You:\"\"\"\n",
"\n",
- "generator2 = Generator(model_client=OpenAIClient(), model_kwargs=model_kwargs, template=template)\n",
+ "generator2 = Generator(\n",
+ " model_client=OpenAIClient(), model_kwargs=model_kwargs, template=template\n",
+ ")\n",
"response = generator2(prompt_kwargs=prompt_kwargs)\n",
"print(response)"
]
},
{
"cell_type": "code",
- "execution_count": 8,
+ "execution_count": 5,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
- "2024-06-09 22:12:28 - INFO - [prompt_builder.py:82:__init__] - Prompt has variables: ['input_str']\n",
- "2024-06-09 22:12:28 - INFO - [generator.py:194:call] - prompt_kwargs: {'input_str': 'What is the capital of France?'}\n",
- "2024-06-09 22:12:28 - INFO - [generator.py:195:call] - model_kwargs: {}\n",
- "2024-06-09 22:12:29 - INFO - [_client.py:1026:_send_single_request] - HTTP Request: POST https://api.groq.com/openai/v1/chat/completions \"HTTP/1.1 200 OK\"\n",
- "2024-06-09 22:12:29 - INFO - [generator.py:203:call] - output: GeneratorOutput(data='Bonjour! The capital of France is indeed Paris! But did you know that Paris is also the city of love, famous for its Eiffel Tower, Louvre Museum, and, of course, croissants...', error=None, raw_response='Bonjour! The capital of France is indeed Paris! But did you know that Paris is also the city of love, famous for its Eiffel Tower, Louvre Museum, and, of course, croissants...')\n",
- "GeneratorOutput(data='Bonjour! The capital of France is indeed Paris! But did you know that Paris is also the city of love, famous for its Eiffel Tower, Louvre Museum, and, of course, croissants...', error=None, raw_response='Bonjour! The capital of France is indeed Paris! But did you know that Paris is also the city of love, famous for its Eiffel Tower, Louvre Museum, and, of course, croissants...')\n"
+ "2024-09-11 06:29:54 - prompt_builder - INFO - [prompt_builder.py:65:__init__] - Prompt has variables: ['input_str']\n",
+ "2024-09-11 06:29:54 - generator - INFO - [generator.py:141:__init__] - Generator Generator initialized.\n",
+ "2024-09-11 06:29:54 - _client - INFO - [_client.py:1038:_send_single_request] - HTTP Request: POST https://api.groq.com/openai/v1/chat/completions \"HTTP/1.1 200 OK\"\n",
+ "2024-09-11 06:29:54 - generator - INFO - [generator.py:773:call] - output: GeneratorOutput(id=None, data='Bonjour! But let me guess, you\\'re not asking me because you just want to chat (although, let\\'s be real, I\\'m fabulous at conversation). No, I\\'m guessing you\\'re asking because you need to polish off your geography skills and you\\'ve got a Jeopardy! audition coming up, oui?\\n\\nSo, the capital of France (drumroll, please)... is PARIS! Voilà! You\\'re welcome. Now, if you\\'ll excuse me, I have to go practice my \"Ooh la la\"s in the mirror.', error=None, usage=CompletionUsage(completion_tokens=114, prompt_tokens=37, total_tokens=151), raw_response='Bonjour! But let me guess, you\\'re not asking me because you just want to chat (although, let\\'s be real, I\\'m fabulous at conversation). No, I\\'m guessing you\\'re asking because you need to polish off your geography skills and you\\'ve got a Jeopardy! audition coming up, oui?\\n\\nSo, the capital of France (drumroll, please)... is PARIS! Voilà! You\\'re welcome. Now, if you\\'ll excuse me, I have to go practice my \"Ooh la la\"s in the mirror.', metadata=None)\n",
+ "GeneratorOutput(id=None, data='Bonjour! But let me guess, you\\'re not asking me because you just want to chat (although, let\\'s be real, I\\'m fabulous at conversation). No, I\\'m guessing you\\'re asking because you need to polish off your geography skills and you\\'ve got a Jeopardy! audition coming up, oui?\\n\\nSo, the capital of France (drumroll, please)... is PARIS! Voilà! You\\'re welcome. Now, if you\\'ll excuse me, I have to go practice my \"Ooh la la\"s in the mirror.', error=None, usage=CompletionUsage(completion_tokens=114, prompt_tokens=37, total_tokens=151), raw_response='Bonjour! But let me guess, you\\'re not asking me because you just want to chat (although, let\\'s be real, I\\'m fabulous at conversation). No, I\\'m guessing you\\'re asking because you need to polish off your geography skills and you\\'ve got a Jeopardy! audition coming up, oui?\\n\\nSo, the capital of France (drumroll, please)... is PARIS! Voilà! You\\'re welcome. Now, if you\\'ll excuse me, I have to go practice my \"Ooh la la\"s in the mirror.', metadata=None)\n"
]
}
],
"source": [
"# Let us use llama3 from groq\n",
- "from lightrag.components.model_client import GroqAPIClient\n",
+ "from adalflow.components.model_client import GroqAPIClient\n",
"\n",
"groq_model_kwargs = {\"model\": \"llama3-8b-8192\"}\n",
- "generator3 = Generator(model_client=GroqAPIClient(), model_kwargs=groq_model_kwargs, template=template)\n",
+ "generator3 = Generator(\n",
+ " model_client=GroqAPIClient(), model_kwargs=groq_model_kwargs, template=template\n",
+ ")\n",
"\n",
"response = generator3(prompt_kwargs=prompt_kwargs)\n",
"print(response)"
@@ -179,78 +207,80 @@
},
{
"cell_type": "code",
- "execution_count": 11,
+ "execution_count": 6,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
- "2024-06-09 22:20:08 - INFO - [generator.py:217:acall] - prompt_kwargs: {'input_str': 'What is the capital of France?'}\n",
- "2024-06-09 22:20:08 - INFO - [generator.py:218:acall] - model_kwargs: {}\n",
- "2024-06-09 22:20:08 - INFO - [generator.py:217:acall] - prompt_kwargs: {'input_str': 'What is the capital of France?'}\n",
- "2024-06-09 22:20:08 - INFO - [generator.py:218:acall] - model_kwargs: {}\n",
- "2024-06-09 22:20:08 - INFO - [generator.py:217:acall] - prompt_kwargs: {'input_str': 'What is the capital of France?'}\n",
- "2024-06-09 22:20:08 - INFO - [generator.py:218:acall] - model_kwargs: {}\n",
- "2024-06-09 22:20:08 - INFO - [generator.py:217:acall] - prompt_kwargs: {'input_str': 'What is the capital of France?'}\n",
- "2024-06-09 22:20:08 - INFO - [generator.py:218:acall] - model_kwargs: {}\n",
- "2024-06-09 22:20:08 - INFO - [generator.py:217:acall] - prompt_kwargs: {'input_str': 'What is the capital of France?'}\n",
- "2024-06-09 22:20:08 - INFO - [generator.py:218:acall] - model_kwargs: {}\n",
- "2024-06-09 22:20:08 - INFO - [generator.py:217:acall] - prompt_kwargs: {'input_str': 'What is the capital of France?'}\n",
- "2024-06-09 22:20:08 - INFO - [generator.py:218:acall] - model_kwargs: {}\n",
- "2024-06-09 22:20:08 - INFO - [generator.py:217:acall] - prompt_kwargs: {'input_str': 'What is the capital of France?'}\n",
- "2024-06-09 22:20:08 - INFO - [generator.py:218:acall] - model_kwargs: {}\n",
- "2024-06-09 22:20:08 - INFO - [generator.py:217:acall] - prompt_kwargs: {'input_str': 'What is the capital of France?'}\n",
- "2024-06-09 22:20:08 - INFO - [generator.py:218:acall] - model_kwargs: {}\n",
- "2024-06-09 22:20:08 - INFO - [generator.py:217:acall] - prompt_kwargs: {'input_str': 'What is the capital of France?'}\n",
- "2024-06-09 22:20:08 - INFO - [generator.py:218:acall] - model_kwargs: {}\n",
- "2024-06-09 22:20:08 - INFO - [generator.py:217:acall] - prompt_kwargs: {'input_str': 'What is the capital of France?'}\n",
- "2024-06-09 22:20:08 - INFO - [generator.py:218:acall] - model_kwargs: {}\n",
- "2024-06-09 22:20:09 - INFO - [_client.py:1773:_send_single_request] - HTTP Request: POST https://api.groq.com/openai/v1/chat/completions \"HTTP/1.1 200 OK\"\n",
- "2024-06-09 22:20:09 - INFO - [generator.py:225:acall] - output: GeneratorOutput(data=\"Bonjour! The capital of France is Paris, of course! But did you know that Paris is actually the City of Love? At least, that's what the tourists keep telling me.\", error=None, raw_response=\"Bonjour! The capital of France is Paris, of course! But did you know that Paris is actually the City of Love? At least, that's what the tourists keep telling me.\")\n",
- "2024-06-09 22:20:09 - INFO - [_client.py:1773:_send_single_request] - HTTP Request: POST https://api.groq.com/openai/v1/chat/completions \"HTTP/1.1 200 OK\"\n",
- "2024-06-09 22:20:09 - INFO - [_client.py:1773:_send_single_request] - HTTP Request: POST https://api.groq.com/openai/v1/chat/completions \"HTTP/1.1 200 OK\"\n",
- "2024-06-09 22:20:09 - INFO - [_client.py:1773:_send_single_request] - HTTP Request: POST https://api.groq.com/openai/v1/chat/completions \"HTTP/1.1 200 OK\"\n",
- "2024-06-09 22:20:09 - INFO - [_client.py:1773:_send_single_request] - HTTP Request: POST https://api.groq.com/openai/v1/chat/completions \"HTTP/1.1 200 OK\"\n",
- "2024-06-09 22:20:09 - INFO - [_client.py:1773:_send_single_request] - HTTP Request: POST https://api.groq.com/openai/v1/chat/completions \"HTTP/1.1 200 OK\"\n",
- "2024-06-09 22:20:09 - INFO - [generator.py:225:acall] - output: GeneratorOutput(data='Bien sûr! The capital of France is... (drumroll, please)... Paris!', error=None, raw_response='Bien sûr! The capital of France is... (drumroll, please)... Paris!')\n",
- "2024-06-09 22:20:09 - INFO - [generator.py:225:acall] - output: GeneratorOutput(data='Bonjour!\\n\\nThe capital of France is none other than Paris! (or as the French like to call it, \"The City of Love and Croissants\").\\n\\nYou know what they say: Paris is always a good idea...', error=None, raw_response='Bonjour!\\n\\nThe capital of France is none other than Paris! (or as the French like to call it, \"The City of Love and Croissants\").\\n\\nYou know what they say: Paris is always a good idea...')\n",
- "2024-06-09 22:20:09 - INFO - [generator.py:225:acall] - output: GeneratorOutput(data=\"Bonjour! The capital of France is Paris, of course! But did you know that it's also the City of Love, where the Eiffel Tower stands tall and the croissants are always fresh?\", error=None, raw_response=\"Bonjour! The capital of France is Paris, of course! But did you know that it's also the City of Love, where the Eiffel Tower stands tall and the croissants are always fresh?\")\n",
- "2024-06-09 22:20:09 - INFO - [generator.py:225:acall] - output: GeneratorOutput(data='Bonjour! The capital of France is Paris, naturally!', error=None, raw_response='Bonjour! The capital of France is Paris, naturally!')\n",
- "2024-06-09 22:20:09 - INFO - [generator.py:225:acall] - output: GeneratorOutput(data='Bonjour! The capital of France is... (drumroll please)... Paris! And if you\\'re feeling fancy, you can also say \"Je suis à Paris, comment allez-vous?\" which roughly translates to \"I\\'m in Paris, how are you?\" but actually means \"I\\'m in Paris, drop everything and let\\'s go party!\"', error=None, raw_response='Bonjour! The capital of France is... (drumroll please)... Paris! And if you\\'re feeling fancy, you can also say \"Je suis à Paris, comment allez-vous?\" which roughly translates to \"I\\'m in Paris, how are you?\" but actually means \"I\\'m in Paris, drop everything and let\\'s go party!\"')\n",
- "2024-06-09 22:20:09 - INFO - [_client.py:1773:_send_single_request] - HTTP Request: POST https://api.groq.com/openai/v1/chat/completions \"HTTP/1.1 200 OK\"\n",
- "2024-06-09 22:20:09 - INFO - [generator.py:225:acall] - output: GeneratorOutput(data=\"Bonjour! The capital of France is... (drumroll please)... PARIS! Nope, just kidding, that would be too easy! Seriously though, the capital of France is indeed Paris! But let's keep the suspense going, shall we?\", error=None, raw_response=\"Bonjour! The capital of France is... (drumroll please)... PARIS! Nope, just kidding, that would be too easy! Seriously though, the capital of France is indeed Paris! But let's keep the suspense going, shall we?\")\n",
- "2024-06-09 22:20:09 - INFO - [_client.py:1773:_send_single_request] - HTTP Request: POST https://api.groq.com/openai/v1/chat/completions \"HTTP/1.1 200 OK\"\n",
- "2024-06-09 22:20:09 - INFO - [generator.py:225:acall] - output: GeneratorOutput(data=\"Bonjour! The capital of France is... (drumroll please)... Paris! But seriously, you can't even imagine how much cheese goes into making decisions in that city.\", error=None, raw_response=\"Bonjour! The capital of France is... (drumroll please)... Paris! But seriously, you can't even imagine how much cheese goes into making decisions in that city.\")\n",
- "2024-06-09 22:20:09 - INFO - [_client.py:1773:_send_single_request] - HTTP Request: POST https://api.groq.com/openai/v1/chat/completions \"HTTP/1.1 200 OK\"\n",
- "2024-06-09 22:20:09 - INFO - [generator.py:225:acall] - output: GeneratorOutput(data='Bonjour! The capital of France is Paris, bien sûr! But did you know that Paris is often referred to as the \"City of Love\"? Maybe it\\'s because the Eiffel Tower is the most romantic landmark in the world... or maybe it\\'s because the French have a certain... je ne sais quoi when it comes to romance.', error=None, raw_response='Bonjour! The capital of France is Paris, bien sûr! But did you know that Paris is often referred to as the \"City of Love\"? Maybe it\\'s because the Eiffel Tower is the most romantic landmark in the world... or maybe it\\'s because the French have a certain... je ne sais quoi when it comes to romance.')\n",
- "2024-06-09 22:20:09 - INFO - [_client.py:1773:_send_single_request] - HTTP Request: POST https://api.groq.com/openai/v1/chat/completions \"HTTP/1.1 200 OK\"\n",
- "2024-06-09 22:20:09 - INFO - [generator.py:225:acall] - output: GeneratorOutput(data='Bonjour! The capital of France is Paris, darling! But did you know that Paris is also the city of love, art, and croissants?', error=None, raw_response='Bonjour! The capital of France is Paris, darling! But did you know that Paris is also the city of love, art, and croissants?')\n",
- "Time taken for 10 async calls: 0.8704450130462646\n",
- "[GeneratorOutput(data='Bonjour! The capital of France is Paris, darling! But did you know that Paris is also the city of love, art, and croissants?', error=None, raw_response='Bonjour! The capital of France is Paris, darling! But did you know that Paris is also the city of love, art, and croissants?'), GeneratorOutput(data=\"Bonjour! The capital of France is... (drumroll please)... PARIS! Nope, just kidding, that would be too easy! Seriously though, the capital of France is indeed Paris! But let's keep the suspense going, shall we?\", error=None, raw_response=\"Bonjour! The capital of France is... (drumroll please)... PARIS! Nope, just kidding, that would be too easy! Seriously though, the capital of France is indeed Paris! But let's keep the suspense going, shall we?\"), GeneratorOutput(data='Bien sûr! The capital of France is... (drumroll, please)... Paris!', error=None, raw_response='Bien sûr! The capital of France is... (drumroll, please)... Paris!'), GeneratorOutput(data=\"Bonjour! The capital of France is Paris, of course! But did you know that Paris is actually the City of Love? At least, that's what the tourists keep telling me.\", error=None, raw_response=\"Bonjour! The capital of France is Paris, of course! But did you know that Paris is actually the City of Love? At least, that's what the tourists keep telling me.\"), GeneratorOutput(data=\"Bonjour! The capital of France is... (drumroll please)... Paris! But seriously, you can't even imagine how much cheese goes into making decisions in that city.\", error=None, raw_response=\"Bonjour! The capital of France is... (drumroll please)... Paris! But seriously, you can't even imagine how much cheese goes into making decisions in that city.\"), GeneratorOutput(data='Bonjour! The capital of France is Paris, naturally!', error=None, raw_response='Bonjour! The capital of France is Paris, naturally!'), GeneratorOutput(data=\"Bonjour! The capital of France is Paris, of course! 
But did you know that it's also the City of Love, where the Eiffel Tower stands tall and the croissants are always fresh?\", error=None, raw_response=\"Bonjour! The capital of France is Paris, of course! But did you know that it's also the City of Love, where the Eiffel Tower stands tall and the croissants are always fresh?\"), GeneratorOutput(data='Bonjour! The capital of France is... (drumroll please)... Paris! And if you\\'re feeling fancy, you can also say \"Je suis à Paris, comment allez-vous?\" which roughly translates to \"I\\'m in Paris, how are you?\" but actually means \"I\\'m in Paris, drop everything and let\\'s go party!\"', error=None, raw_response='Bonjour! The capital of France is... (drumroll please)... Paris! And if you\\'re feeling fancy, you can also say \"Je suis à Paris, comment allez-vous?\" which roughly translates to \"I\\'m in Paris, how are you?\" but actually means \"I\\'m in Paris, drop everything and let\\'s go party!\"'), GeneratorOutput(data='Bonjour! The capital of France is Paris, bien sûr! But did you know that Paris is often referred to as the \"City of Love\"? Maybe it\\'s because the Eiffel Tower is the most romantic landmark in the world... or maybe it\\'s because the French have a certain... je ne sais quoi when it comes to romance.', error=None, raw_response='Bonjour! The capital of France is Paris, bien sûr! But did you know that Paris is often referred to as the \"City of Love\"? Maybe it\\'s because the Eiffel Tower is the most romantic landmark in the world... or maybe it\\'s because the French have a certain... je ne sais quoi when it comes to romance.'), GeneratorOutput(data='Bonjour!\\n\\nThe capital of France is none other than Paris! (or as the French like to call it, \"The City of Love and Croissants\").\\n\\nYou know what they say: Paris is always a good idea...', error=None, raw_response='Bonjour!\\n\\nThe capital of France is none other than Paris! 
(or as the French like to call it, \"The City of Love and Croissants\").\\n\\nYou know what they say: Paris is always a good idea...')]\n"
+ "2024-09-11 06:30:13 - generator - INFO - [generator.py:789:acall] - prompt_kwargs: {'input_str': 'What is the capital of France?'}\n",
+ "2024-09-11 06:30:13 - generator - INFO - [generator.py:790:acall] - model_kwargs: {}\n",
+ "2024-09-11 06:30:13 - generator - INFO - [generator.py:789:acall] - prompt_kwargs: {'input_str': 'What is the capital of France?'}\n",
+ "2024-09-11 06:30:13 - generator - INFO - [generator.py:790:acall] - model_kwargs: {}\n",
+ "2024-09-11 06:30:13 - generator - INFO - [generator.py:789:acall] - prompt_kwargs: {'input_str': 'What is the capital of France?'}\n",
+ "2024-09-11 06:30:13 - generator - INFO - [generator.py:790:acall] - model_kwargs: {}\n",
+ "2024-09-11 06:30:13 - generator - INFO - [generator.py:789:acall] - prompt_kwargs: {'input_str': 'What is the capital of France?'}\n",
+ "2024-09-11 06:30:13 - generator - INFO - [generator.py:790:acall] - model_kwargs: {}\n",
+ "2024-09-11 06:30:13 - generator - INFO - [generator.py:789:acall] - prompt_kwargs: {'input_str': 'What is the capital of France?'}\n",
+ "2024-09-11 06:30:13 - generator - INFO - [generator.py:790:acall] - model_kwargs: {}\n",
+ "2024-09-11 06:30:13 - generator - INFO - [generator.py:789:acall] - prompt_kwargs: {'input_str': 'What is the capital of France?'}\n",
+ "2024-09-11 06:30:13 - generator - INFO - [generator.py:790:acall] - model_kwargs: {}\n",
+ "2024-09-11 06:30:13 - generator - INFO - [generator.py:789:acall] - prompt_kwargs: {'input_str': 'What is the capital of France?'}\n",
+ "2024-09-11 06:30:13 - generator - INFO - [generator.py:790:acall] - model_kwargs: {}\n",
+ "2024-09-11 06:30:13 - generator - INFO - [generator.py:789:acall] - prompt_kwargs: {'input_str': 'What is the capital of France?'}\n",
+ "2024-09-11 06:30:13 - generator - INFO - [generator.py:790:acall] - model_kwargs: {}\n",
+ "2024-09-11 06:30:13 - generator - INFO - [generator.py:789:acall] - prompt_kwargs: {'input_str': 'What is the capital of France?'}\n",
+ "2024-09-11 06:30:13 - generator - INFO - [generator.py:790:acall] - model_kwargs: {}\n",
+ "2024-09-11 06:30:13 - generator - INFO - [generator.py:789:acall] - prompt_kwargs: {'input_str': 'What is the capital of France?'}\n",
+ "2024-09-11 06:30:13 - generator - INFO - [generator.py:790:acall] - model_kwargs: {}\n",
+ "2024-09-11 06:30:14 - _client - INFO - [_client.py:1786:_send_single_request] - HTTP Request: POST https://api.groq.com/openai/v1/chat/completions \"HTTP/1.1 200 OK\"\n",
+ "2024-09-11 06:30:14 - generator - INFO - [generator.py:812:acall] - output: GeneratorOutput(id=None, data='Bonjour!', error=None, usage=CompletionUsage(completion_tokens=3, prompt_tokens=37, total_tokens=40), raw_response='Bonjour!', metadata=None)\n",
+ "2024-09-11 06:30:14 - _client - INFO - [_client.py:1786:_send_single_request] - HTTP Request: POST https://api.groq.com/openai/v1/chat/completions \"HTTP/1.1 200 OK\"\n",
+ "2024-09-11 06:30:14 - generator - INFO - [generator.py:812:acall] - output: GeneratorOutput(id=None, data=\"Bonjour! *sips imaginary café au lait* The capital of France, bien sûr! *dramatic flair* It's PARIS, darling! Where the Eiffel Tower is always twinkling, croissants are always flaky, and the fashion is always tres chic!\", error=None, usage=CompletionUsage(completion_tokens=60, prompt_tokens=37, total_tokens=97), raw_response=\"Bonjour! *sips imaginary café au lait* The capital of France, bien sûr! *dramatic flair* It's PARIS, darling! Where the Eiffel Tower is always twinkling, croissants are always flaky, and the fashion is always tres chic!\", metadata=None)\n",
+ "2024-09-11 06:30:14 - _client - INFO - [_client.py:1786:_send_single_request] - HTTP Request: POST https://api.groq.com/openai/v1/chat/completions \"HTTP/1.1 200 OK\"\n",
+ "2024-09-11 06:30:14 - generator - INFO - [generator.py:812:acall] - output: GeneratorOutput(id=None, data=\"Bonjour! You want to know the answer to this one, don't you? Well, let me tell you, it's not Paris... just kidding, it's actually Paris! But seriously, if you want to impress your French friends with some high-stakes trivia, just remember that the capital of France is indeed the City of Love, the City of Lights, and the City of Wining and Dining (and maybe a few croissants).\", error=None, usage=CompletionUsage(completion_tokens=91, prompt_tokens=37, total_tokens=128), raw_response=\"Bonjour! You want to know the answer to this one, don't you? Well, let me tell you, it's not Paris... just kidding, it's actually Paris! But seriously, if you want to impress your French friends with some high-stakes trivia, just remember that the capital of France is indeed the City of Love, the City of Lights, and the City of Wining and Dining (and maybe a few croissants).\", metadata=None)\n",
+ "2024-09-11 06:30:14 - _client - INFO - [_client.py:1786:_send_single_request] - HTTP Request: POST https://api.groq.com/openai/v1/chat/completions \"HTTP/1.1 200 OK\"\n",
+ "2024-09-11 06:30:14 - _client - INFO - [_client.py:1786:_send_single_request] - HTTP Request: POST https://api.groq.com/openai/v1/chat/completions \"HTTP/1.1 200 OK\"\n",
+ "2024-09-11 06:30:14 - generator - INFO - [generator.py:812:acall] - output: GeneratorOutput(id=None, data='Bonjour! The capital of France is... (drumroll) Paris! And if you\\'re wondering, the correct pronunciation is \"Ah-reees,\" not \"Purdie-air-iss.\" Don\\'t worry, I won\\'t make fun of you... unless you ask me to.', error=None, usage=CompletionUsage(completion_tokens=59, prompt_tokens=37, total_tokens=96), raw_response='Bonjour! The capital of France is... (drumroll) Paris! And if you\\'re wondering, the correct pronunciation is \"Ah-reees,\" not \"Purdie-air-iss.\" Don\\'t worry, I won\\'t make fun of you... unless you ask me to.', metadata=None)\n",
+ "2024-09-11 06:30:14 - generator - INFO - [generator.py:812:acall] - output: GeneratorOutput(id=None, data=\"Mon ami, you want to know the capital of France? Well, let me tell you, it's not Rome, it's not Berlin, it's not even Paris (okay, it's actually Paris, don't be smarty pants), it's... (drumroll please)... PARIS! But seriously, if you don't know that by now, we should probably have a chat about your geography skills (just kidding, it's a tough question, I've been there too). But in all seriousness, the answer is indeed Paris! Vive la France!\", error=None, usage=CompletionUsage(completion_tokens=114, prompt_tokens=37, total_tokens=151), raw_response=\"Mon ami, you want to know the capital of France? Well, let me tell you, it's not Rome, it's not Berlin, it's not even Paris (okay, it's actually Paris, don't be smarty pants), it's... (drumroll please)... PARIS! But seriously, if you don't know that by now, we should probably have a chat about your geography skills (just kidding, it's a tough question, I've been there too). But in all seriousness, the answer is indeed Paris! Vive la France!\", metadata=None)\n",
+ "2024-09-11 06:30:14 - _client - INFO - [_client.py:1786:_send_single_request] - HTTP Request: POST https://api.groq.com/openai/v1/chat/completions \"HTTP/1.1 200 OK\"\n",
+ "2024-09-11 06:30:14 - generator - INFO - [generator.py:812:acall] - output: GeneratorOutput(id=None, data='Bonjour! The capital of France is, of course, Paris! But don\\'t worry if you forgot, it\\'s not like you\\'re a \"fowl\" in the sense that you\\'re not aware of it... okay, I\\'ll stop with the bird puns now.', error=None, usage=CompletionUsage(completion_tokens=56, prompt_tokens=37, total_tokens=93), raw_response='Bonjour! The capital of France is, of course, Paris! But don\\'t worry if you forgot, it\\'s not like you\\'re a \"fowl\" in the sense that you\\'re not aware of it... okay, I\\'ll stop with the bird puns now.', metadata=None)\n",
+ "2024-09-11 06:30:14 - _client - INFO - [_client.py:1786:_send_single_request] - HTTP Request: POST https://api.groq.com/openai/v1/chat/completions \"HTTP/1.1 200 OK\"\n",
+ "2024-09-11 06:30:14 - generator - INFO - [generator.py:812:acall] - output: GeneratorOutput(id=None, data=\"Bonjour!\\n\\nOf course, the capital of France is... (drumroll please)... Paris! But let me add, the fashion capital of France is actually Chanel, the cuisine capital is McDonald's, and the procrastination capital is, um, never leaving the Louvre museum.\", error=None, usage=CompletionUsage(completion_tokens=57, prompt_tokens=37, total_tokens=94), raw_response=\"Bonjour!\\n\\nOf course, the capital of France is... (drumroll please)... Paris! But let me add, the fashion capital of France is actually Chanel, the cuisine capital is McDonald's, and the procrastination capital is, um, never leaving the Louvre museum.\", metadata=None)\n",
+ "2024-09-11 06:30:14 - _client - INFO - [_client.py:1786:_send_single_request] - HTTP Request: POST https://api.groq.com/openai/v1/chat/completions \"HTTP/1.1 200 OK\"\n",
+ "2024-09-11 06:30:14 - generator - INFO - [generator.py:812:acall] - output: GeneratorOutput(id=None, data='Bonjour!', error=None, usage=CompletionUsage(completion_tokens=3, prompt_tokens=37, total_tokens=40), raw_response='Bonjour!', metadata=None)\n",
+ "2024-09-11 06:30:15 - _client - INFO - [_client.py:1786:_send_single_request] - HTTP Request: POST https://api.groq.com/openai/v1/chat/completions \"HTTP/1.1 200 OK\"\n",
+ "2024-09-11 06:30:15 - generator - INFO - [generator.py:812:acall] - output: GeneratorOutput(id=None, data=\"Bonjour! The capital of France is... (drumroll please)... Paris! Oui, oui, it's a city so charming, you'll feel like you're in a romantic comedy... or at least, that's what the movies would have you believe.\", error=None, usage=CompletionUsage(completion_tokens=54, prompt_tokens=37, total_tokens=91), raw_response=\"Bonjour! The capital of France is... (drumroll please)... Paris! Oui, oui, it's a city so charming, you'll feel like you're in a romantic comedy... or at least, that's what the movies would have you believe.\", metadata=None)\n",
+ "2024-09-11 06:30:15 - _client - INFO - [_client.py:1786:_send_single_request] - HTTP Request: POST https://api.groq.com/openai/v1/chat/completions \"HTTP/1.1 200 OK\"\n",
+ "2024-09-11 06:30:15 - generator - INFO - [generator.py:812:acall] - output: GeneratorOutput(id=None, data='Bonjour! The capital of France is, of course, Paris! But did you know that Paris is so romantic, it\\'ll make you want to croon \"Ooh la la\" and drink all the coffee?', error=None, usage=CompletionUsage(completion_tokens=44, prompt_tokens=37, total_tokens=81), raw_response='Bonjour! The capital of France is, of course, Paris! But did you know that Paris is so romantic, it\\'ll make you want to croon \"Ooh la la\" and drink all the coffee?', metadata=None)\n",
+ "Time taken for 10 async calls: 1.94460129737854\n",
+ "[GeneratorOutput(id=None, data=\"Bonjour! You want to know the answer to this one, don't you? Well, let me tell you, it's not Paris... just kidding, it's actually Paris! But seriously, if you want to impress your French friends with some high-stakes trivia, just remember that the capital of France is indeed the City of Love, the City of Lights, and the City of Wining and Dining (and maybe a few croissants).\", error=None, usage=CompletionUsage(completion_tokens=91, prompt_tokens=37, total_tokens=128), raw_response=\"Bonjour! You want to know the answer to this one, don't you? Well, let me tell you, it's not Paris... just kidding, it's actually Paris! But seriously, if you want to impress your French friends with some high-stakes trivia, just remember that the capital of France is indeed the City of Love, the City of Lights, and the City of Wining and Dining (and maybe a few croissants).\", metadata=None), GeneratorOutput(id=None, data='Bonjour!', error=None, usage=CompletionUsage(completion_tokens=3, prompt_tokens=37, total_tokens=40), raw_response='Bonjour!', metadata=None), GeneratorOutput(id=None, data=\"Bonjour!\\n\\nOf course, the capital of France is... (drumroll please)... Paris! But let me add, the fashion capital of France is actually Chanel, the cuisine capital is McDonald's, and the procrastination capital is, um, never leaving the Louvre museum.\", error=None, usage=CompletionUsage(completion_tokens=57, prompt_tokens=37, total_tokens=94), raw_response=\"Bonjour!\\n\\nOf course, the capital of France is... (drumroll please)... Paris! But let me add, the fashion capital of France is actually Chanel, the cuisine capital is McDonald's, and the procrastination capital is, um, never leaving the Louvre museum.\", metadata=None), GeneratorOutput(id=None, data='Bonjour! The capital of France is, of course, Paris! But don\\'t worry if you forgot, it\\'s not like you\\'re a \"fowl\" in the sense that you\\'re not aware of it... 
okay, I\\'ll stop with the bird puns now.', error=None, usage=CompletionUsage(completion_tokens=56, prompt_tokens=37, total_tokens=93), raw_response='Bonjour! The capital of France is, of course, Paris! But don\\'t worry if you forgot, it\\'s not like you\\'re a \"fowl\" in the sense that you\\'re not aware of it... okay, I\\'ll stop with the bird puns now.', metadata=None), GeneratorOutput(id=None, data=\"Bonjour! The capital of France is... (drumroll please)... Paris! Oui, oui, it's a city so charming, you'll feel like you're in a romantic comedy... or at least, that's what the movies would have you believe.\", error=None, usage=CompletionUsage(completion_tokens=54, prompt_tokens=37, total_tokens=91), raw_response=\"Bonjour! The capital of France is... (drumroll please)... Paris! Oui, oui, it's a city so charming, you'll feel like you're in a romantic comedy... or at least, that's what the movies would have you believe.\", metadata=None), GeneratorOutput(id=None, data='Bonjour! The capital of France is... (drumroll) Paris! And if you\\'re wondering, the correct pronunciation is \"Ah-reees,\" not \"Purdie-air-iss.\" Don\\'t worry, I won\\'t make fun of you... unless you ask me to.', error=None, usage=CompletionUsage(completion_tokens=59, prompt_tokens=37, total_tokens=96), raw_response='Bonjour! The capital of France is... (drumroll) Paris! And if you\\'re wondering, the correct pronunciation is \"Ah-reees,\" not \"Purdie-air-iss.\" Don\\'t worry, I won\\'t make fun of you... unless you ask me to.', metadata=None), GeneratorOutput(id=None, data=\"Mon ami, you want to know the capital of France? Well, let me tell you, it's not Rome, it's not Berlin, it's not even Paris (okay, it's actually Paris, don't be smarty pants), it's... (drumroll please)... PARIS! But seriously, if you don't know that by now, we should probably have a chat about your geography skills (just kidding, it's a tough question, I've been there too). 
But in all seriousness, the answer is indeed Paris! Vive la France!\", error=None, usage=CompletionUsage(completion_tokens=114, prompt_tokens=37, total_tokens=151), raw_response=\"Mon ami, you want to know the capital of France? Well, let me tell you, it's not Rome, it's not Berlin, it's not even Paris (okay, it's actually Paris, don't be smarty pants), it's... (drumroll please)... PARIS! But seriously, if you don't know that by now, we should probably have a chat about your geography skills (just kidding, it's a tough question, I've been there too). But in all seriousness, the answer is indeed Paris! Vive la France!\", metadata=None), GeneratorOutput(id=None, data='Bonjour!', error=None, usage=CompletionUsage(completion_tokens=3, prompt_tokens=37, total_tokens=40), raw_response='Bonjour!', metadata=None), GeneratorOutput(id=None, data='Bonjour! The capital of France is, of course, Paris! But did you know that Paris is so romantic, it\\'ll make you want to croon \"Ooh la la\" and drink all the coffee?', error=None, usage=CompletionUsage(completion_tokens=44, prompt_tokens=37, total_tokens=81), raw_response='Bonjour! The capital of France is, of course, Paris! But did you know that Paris is so romantic, it\\'ll make you want to croon \"Ooh la la\" and drink all the coffee?', metadata=None), GeneratorOutput(id=None, data=\"Bonjour! *sips imaginary café au lait* The capital of France, bien sûr! *dramatic flair* It's PARIS, darling! Where the Eiffel Tower is always twinkling, croissants are always flaky, and the fashion is always tres chic!\", error=None, usage=CompletionUsage(completion_tokens=60, prompt_tokens=37, total_tokens=97), raw_response=\"Bonjour! *sips imaginary café au lait* The capital of France, bien sûr! *dramatic flair* It's PARIS, darling! Where the Eiffel Tower is always twinkling, croissants are always flaky, and the fashion is always tres chic!\", metadata=None)]\n"
]
}
],
"source": [
"# Lets do 10 async calls at once, lets use GroqAPIClient\n",
- "import nest_asyncio # import asyncio, use nest_asyncio.apply() if you are in jupyter notebook\n",
+ "import nest_asyncio # import asyncio, use nest_asyncio.apply() if you are in jupyter notebook\n",
"import asyncio\n",
+ "\n",
"nest_asyncio.apply()\n",
"\n",
"import time\n",
"from typing import List\n",
"\n",
+ "\n",
"async def make_async_calls(queries: List[str]):\n",
" calls = [generator3.acall(prompt_kwargs={\"input_str\": query}) for query in queries]\n",
" responses = await asyncio.gather(*calls)\n",
" return responses\n",
"\n",
+ "\n",
"queries = [query] * 10\n",
"start = time.time()\n",
"responses = asyncio.run(make_async_calls(queries))\n",
"print(f\"Time taken for 10 async calls: {time.time() - start}\")\n",
- "print(responses)\n",
- "\n"
+ "print(responses)"
]
},
{
@@ -263,9 +293,9 @@
],
"metadata": {
"kernelspec": {
- "display_name": "my-project-kernel",
+ "display_name": "openc",
"language": "python",
- "name": "my-project-kernel"
+ "name": "python3"
},
"language_info": {
"codemirror_mode": {
@@ -277,7 +307,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
- "version": "3.12.4"
+ "version": "3.10.14"
}
},
"nbformat": 4,
From 53d5bb098212efeb5a86cdb01b85d19ae2546c23 Mon Sep 17 00:00:00 2001
From: Li Yin
Date: Wed, 11 Sep 2024 01:33:53 -0700
Subject: [PATCH 03/24] add back star github banner in the documentation
---
docs/source/conf.py | 84 +++++++++++++--------------
docs/source/tutorials/evaluation.rst | 1 +
docs/source/use_cases/build_a_rag.rst | 14 +++++
docs/source/use_cases/index.rst | 42 +++++++-------
4 files changed, 78 insertions(+), 63 deletions(-)
diff --git a/docs/source/conf.py b/docs/source/conf.py
index a0e60d68..aa56715b 100644
--- a/docs/source/conf.py
+++ b/docs/source/conf.py
@@ -53,48 +53,48 @@
"icon": "fa-brands fa-discord",
},
],
- # "announcement": """
- #
- # """,
+ "announcement": """
+
+ """,
"navbar_end": [
"navbar-icon-links.html",
"search-field.html",
diff --git a/docs/source/tutorials/evaluation.rst b/docs/source/tutorials/evaluation.rst
index 1f14bf84..0d470143 100644
--- a/docs/source/tutorials/evaluation.rst
+++ b/docs/source/tutorials/evaluation.rst
@@ -632,3 +632,4 @@ References
- `Hugging Face Metrics `_
- `RAGAS `_
- `G-eval `_
+ - `Sklearn `_
diff --git a/docs/source/use_cases/build_a_rag.rst b/docs/source/use_cases/build_a_rag.rst
index ce15bb49..dad41e9c 100644
--- a/docs/source/use_cases/build_a_rag.rst
+++ b/docs/source/use_cases/build_a_rag.rst
@@ -15,6 +15,14 @@
Designing RAG
================
+
+.. figure:: /_static/images/generator.png
+ :align: center
+ :alt: AdalFlow generator design
+ :width: 700px
+
+ Generator - The Orchestrator for LLM Prediction
+
Retrieval-Augmented Generation (RAG) is a paradigm that combines the strengths of retrieval and generation models.
Given a user query, RAG retrieves relevant passages from a large corpus and then generates a response based on the retrieved passages.
This formulation opens up a wide range of use cases such as conversational search engine, question answering on a customized knowledge base,
@@ -39,3 +47,9 @@ For each use case, we need to answer:
7. How do I auto-optimize the RAG pipeline with In-context learning(ICLs) with zero-shot prompting and few-shot prompting?
8. What about finetuning? How to do it and would it be more token efficient or more effective?
+
+
+References
+------------------------------------------
+.. [1] Retrieval-Augmented Generation for Knowledge-Intensive NLP Tasks: https://arxiv.org/abs/2005.11401
+.. [2] RAG playbook: https://playbooks.capdev.govtext.gov.sg/
diff --git a/docs/source/use_cases/index.rst b/docs/source/use_cases/index.rst
index 419bdf8b..5dfb654e 100644
--- a/docs/source/use_cases/index.rst
+++ b/docs/source/use_cases/index.rst
@@ -8,27 +8,27 @@ Use Cases
We will build use cases end-to-end, ranging from classification (classical NLP tasks) to question answering, retrieval-augmented generation (RAG), and multi-generator pipelines.
-..
- RAG
- ----------------
- .. list-table::
- :widths: 30 70
- :header-rows: 1
-
- * - Part
- - Description
- * - :doc:`build_a_rag`
- - Designing a RAG pipeline, from offline data processing to online inference.
- * - :doc:`eval_a_rag`
- - Question Answering with `bhh_hard_object_count` dataset, including textual-gradient descent and few-shot boostrap optimization.
-
- .. toctree::
- :maxdepth: 1
- :caption: RAG vibe
- :hidden:
-
- build_a_rag
- eval_a_rag
+
+RAG
+----------------
+.. list-table::
+ :widths: 30 70
+ :header-rows: 1
+
+ * - Part
+ - Description
+ * - :doc:`build_a_rag`
+ - Designing a RAG pipeline, from offline data processing to online inference.
+ * - :doc:`eval_a_rag`
+ - Question Answering with `bhh_hard_object_count` dataset, including textual-gradient descent and few-shot bootstrap optimization.
+
+.. toctree::
+ :maxdepth: 1
+ :caption: RAG vibe
+ :hidden:
+
+ build_a_rag
+ eval_a_rag
Optimization
----------------
From 0a77b4b5c00e2781ea62becaaabe91fa58863147 Mon Sep 17 00:00:00 2001
From: Li Yin
Date: Wed, 11 Sep 2024 12:47:42 -0700
Subject: [PATCH 04/24] add govtech evaluation guideline and star history in
readme
---
README.md | 5 +++++
docs/source/tutorials/evaluation.rst | 6 ++++++
docs/source/use_cases/build_a_rag.rst | 8 ++++++++
3 files changed, 19 insertions(+)
diff --git a/README.md b/README.md
index cbf15396..ba96458d 100644
--- a/README.md
+++ b/README.md
@@ -242,3 +242,8 @@ Many existing works greatly inspired AdalFlow library! Here is a non-exhaustive
url = {https://github.com/SylphAI-Inc/LightRAG}
}
```
+
+# Star History
+
+
+[![Star History Chart](https://api.star-history.com/svg?repos=SylphAI-Inc/AdalFlow&type=Date)](https://star-history.com/#SylphAI-Inc/AdalFlow&Date)
diff --git a/docs/source/tutorials/evaluation.rst b/docs/source/tutorials/evaluation.rst
index 0d470143..7e63e037 100644
--- a/docs/source/tutorials/evaluation.rst
+++ b/docs/source/tutorials/evaluation.rst
@@ -544,6 +544,7 @@ There is one new way is to indirectly use the ground truth answers from the gene
Recall = [GT statements that can be attributed to the retrieved context] / [GT statements]
+There are also **Context Relevance** and **Context Precision** metrics in RAGAS.
LLM or model based judge for Retriever Recall
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -577,6 +578,10 @@ See the evaluation on datasets at :doc:`Evaluating a RAG Pipeline <../tutorials/
Additionally, there are more research for RAG evaluation, such as SemScore [13]_, ARES [14]_, RGB [15]_, etc.
+.. note::
+
+ GovTech Singapore provides a well-explained evaluation guideline [22]_ that aligns with our guideline but with more theoretical explanations of some metrics.
+
For Contributors
------------------------------------------
@@ -614,6 +619,7 @@ References
.. [19] Liu, Yang, et al. "Datasets for large language models: A comprehensive survey." arXiv preprint arXiv:2402.18041 (2024).
.. [20] ROUGE Deep dive: https://medium.com/nlplanet/two-minutes-nlp-learn-the-rouge-metric-by-examples-f179cc285499
.. [21] Zhu, Kunlun, et al. "RAGEval: Scenario Specific RAG Evaluation Dataset Generation Framework." arXiv preprint arXiv:2408.01262 (2024).
+.. [22] https://playbooks.capdev.govtext.gov.sg/evaluation/
.. admonition:: AdalFlow Eval API Reference
:class: highlight
diff --git a/docs/source/use_cases/build_a_rag.rst b/docs/source/use_cases/build_a_rag.rst
index dad41e9c..32435027 100644
--- a/docs/source/use_cases/build_a_rag.rst
+++ b/docs/source/use_cases/build_a_rag.rst
@@ -48,6 +48,14 @@ For each use case, we need to answer:
8. What about finetuning? How to do it and would it be more token efficient or more effective?
+First RAG Paper
+------------------
+RAG was introduced in 2020 by Lewis et al. [1]_ which is an architecture that finetunes both the query encoder (bi-encoder like most embedding models) and the generator (LLM) jointly with only final answer supervision.
+It did not mention document chunking as most of the time, their text length is usually short and also fits into the context length of the embedding models.
+As both the embedding model and the LLM scale up in terms of knowledge and parameters (a 400M LLM was used in the paper), RAG can achieve high performance in a few-shot (prompt engineering) setup without finetuning.
+
+RAG Playbook
+------------------
References
------------------------------------------
From 668e5492db8fc3824215bea7bc65c78da03ad85d Mon Sep 17 00:00:00 2001
From: Li Yin
Date: Thu, 12 Sep 2024 00:47:48 -0700
Subject: [PATCH 05/24] RAG playbook linking all adalflow tutorials to form a
comprehensive playbook
---
README.md | 2 +
adalflow/adalflow/core/db.py | 5 +-
docs/source/tutorials/index.rst | 2 +-
docs/source/tutorials/retriever.rst | 44 ++++++--
docs/source/use_cases/build_a_rag.rst | 34 ++++++
docs/source/use_cases/index.rst | 3 +
docs/source/use_cases/rag_playbook.rst | 138 +++++++++++++++++++++++++
tutorials/retriever/__init__.py | 0
tutorials/retriever/data.py | 21 ++++
tutorials/retriever/local_db.py | 1 +
10 files changed, 239 insertions(+), 11 deletions(-)
create mode 100644 docs/source/use_cases/rag_playbook.rst
create mode 100644 tutorials/retriever/__init__.py
create mode 100644 tutorials/retriever/data.py
create mode 100644 tutorials/retriever/local_db.py
diff --git a/README.md b/README.md
index ba96458d..4421af2a 100644
--- a/README.md
+++ b/README.md
@@ -247,3 +247,5 @@ Many existing works greatly inspired AdalFlow library! Here is a non-exhaustive
[![Star History Chart](https://api.star-history.com/svg?repos=SylphAI-Inc/AdalFlow&type=Date)](https://star-history.com/#SylphAI-Inc/AdalFlow&Date)
+
+
diff --git a/adalflow/adalflow/core/db.py b/adalflow/adalflow/core/db.py
index 5062ded5..85dd4a78 100644
--- a/adalflow/adalflow/core/db.py
+++ b/adalflow/adalflow/core/db.py
@@ -126,10 +126,7 @@ def length(self):
def get_transformer_keys(self) -> List[str]:
return list(self.transformed_items.keys())
- # def get_transformed_data(self, key: str) -> List[U]:
- # """Get the transformed items by key."""
- # return self.transformed_items[key]
-
+ # TODO: combine this to fetch_transformed_items
def get_transformed_data(
self, key: str, filter_fn: Callable[[Any], bool] = lambda x: True
) -> List[U]:
diff --git a/docs/source/tutorials/index.rst b/docs/source/tutorials/index.rst
index 8abf7e56..e7111b0e 100644
--- a/docs/source/tutorials/index.rst
+++ b/docs/source/tutorials/index.rst
@@ -3,7 +3,7 @@
.. _developer_notes:
-Developer Notes
+Tutorials
=============================
.. *Why and How Each Part works*
diff --git a/docs/source/tutorials/retriever.rst b/docs/source/tutorials/retriever.rst
index 1cd39225..7e1a30dd 100644
--- a/docs/source/tutorials/retriever.rst
+++ b/docs/source/tutorials/retriever.rst
@@ -222,13 +222,8 @@ As an example, :class:`BM25Retriever`.
-Currently only :class:`BM25Retriever` needs to have its own ``save_to_file`` and ``load_from_file`` to avoid recomputation again.
-The ``FAISSRetriever`` will work with a database instead to store the embeddings and it alleviates the need for the retriever to deal with states saving.
-
In this note, we will use the following documents and queries for demonstration:
.. code-block:: python
@@ -257,6 +252,43 @@ In this note, we will use the following documents and queries for demonstration:
The first query should retrieve the first and the last document, and the second query should retrieve the second and the third document.
+Documents filtering
+--------------------
+Before using more advanced retrieval methods, it is common to filter the documents first.
+Document filtering is dependent on your data storage, whether it is in memory, local disk, or cloud database.
+For the cloud database, it is highly dependent on the database's search and filter methods. And SQL-based search is common, scalable, and efficient.
+
+If you are using `LocalDB` and `Document` as the data item, you can use the `filter` method to filter the documents.
+
+Before you pass the documents or processed document chunks and embeddings to the retriever, you can filter the documents first.
+
+.. code-block:: python
+
+ from adalflow.core.db import LocalDB
+ from adalflow.core.types import Document
+
+ db = LocalDB()
+ db.connect()
+
+ # Add the documents to the database
+ for doc in documents:
+ db.add_item(Document(**doc))
+
+ # Filter the documents
+ filtered_documents = db.filter(Document, title="Solar Panels")
+
+ print(filtered_documents)
+
+
+Retriever in Action
+--------------------
+All of our retrievers are subclassed from the base retriever, and they are located in the ``components.retriever`` module.
+You can skim through their implementations here: :ref:`retriever`.
+Currently only :class:`BM25Retriever` needs to have its own ``save_to_file`` and ``load_from_file`` to avoid recomputation.
+The ``FAISSRetriever`` will work with a database instead to store the embeddings and it alleviates the need for the retriever to deal with states saving.
+
+
+
FAISSRetriever
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
First, let's do semantic search, here we will use in-memory :class:`FAISSRetriever`.
diff --git a/docs/source/use_cases/build_a_rag.rst b/docs/source/use_cases/build_a_rag.rst
index 32435027..69c97b20 100644
--- a/docs/source/use_cases/build_a_rag.rst
+++ b/docs/source/use_cases/build_a_rag.rst
@@ -48,6 +48,12 @@ For each use case, we need to answer:
8. What about finetuning? How to do it and would it be more token efficient or more effective?
+In this tutorial, we will cover:
+
+- We will provide resource links to form a comprehensive RAG playbook according to the SOTA research and the best practices in the industry.
+- Build a local and model-agnostic RAG pipeline and data processing pipeline with AdalFlow library.
+- Add Query Expansion.
+
First RAG Paper
------------------
RAG was introduced in 2020 by Lewis et al. [1]_ which is an architecture that finetunes both the query encoder (bi-encoder like most embedding models) and the generator (LLM) jointly with only final answer supervision.
@@ -57,6 +63,34 @@ As both the embedding model and LLM model scales up in terms of knowledge and pa
RAG Playbook
------------------
+======================== ========================= =========================================
+RAG Pipeline Component Improvement Techniques Evaluation Metric
+======================== ========================= =========================================
+Data Preparation - Text preprocessing -
+ - Chunking Strategy
+
+Embedding - Embedding Fine-tuning -
+
+Indexing - -
+
+Retrieval - Retrieval Optimization - HIT@K
+ - Query Enhancement - MRR@K
+ - Reranking - MAP@K
+ - NDCG@K
+ - Ragas context relevancy, precision, recall
+
+Completion - Prompt Engineering - Ragas answer relevancy
+ - LLM Fine-tuning - AutoAIS
+ - ROUGE
+ - BLEU
+ - METEOR
+ - F1 Score
+ - BERTScore
+ - UniEval
+ - G-Eval
+======================== ========================= =========================================
+
+
References
------------------------------------------
.. [1] Retrieval-Augmented Generation for Knowledge-Intensive NLP Tasks:https://arxiv.org/abs/2005.11401
diff --git a/docs/source/use_cases/index.rst b/docs/source/use_cases/index.rst
index 5dfb654e..831c5e53 100644
--- a/docs/source/use_cases/index.rst
+++ b/docs/source/use_cases/index.rst
@@ -17,6 +17,8 @@ RAG
* - Part
- Description
+ * - :doc:`rag_playbook`
+ - Comprehensive RAG playbook according to the sota research and the best practices in the industry.
* - :doc:`build_a_rag`
- Designing a RAG pipeline, from offline data processing to online inference.
* - :doc:`eval_a_rag`
@@ -27,6 +29,7 @@ RAG
:caption: RAG vibe
:hidden:
+ rag_playbook
build_a_rag
eval_a_rag
diff --git a/docs/source/use_cases/rag_playbook.rst b/docs/source/use_cases/rag_playbook.rst
new file mode 100644
index 00000000..cb4419a2
--- /dev/null
+++ b/docs/source/use_cases/rag_playbook.rst
@@ -0,0 +1,138 @@
+..
+..
+..
+
+.. raw:: html
+
+
+
+RAG Playbook
+================
+
+
+
+
+In this playbook, we will provide a comprehensive RAG playbook according to the SOTA research and the best practices in the industry.
+The outline of the playbook is as follows:
+
+- RAG Overview
+- From First RAG Paper to the diverse RAG design architecture
+- RAG design and tuning strategies for each component
+
+
+RAG Overview
+----------------
+
+.. figure:: /_static/images/generator.png
+ :align: center
+ :alt: AdalFlow generator design
+ :width: 700px
+
+ Generator - The Orchestrator for LLM Prediction
+
+Retrieval-Augmented Generation (RAG) is a paradigm that combines the strengths of retrieval and generation models.
+Given a user query, RAG retrieves relevant passages from a large corpus and then generates a response based on the retrieved passages.
+This formulation opens up a wide range of use cases such as conversational search engine, question answering on a customized knowledge base,
+customer support, fact-checking.
+RAG helps reduce hallucination and offers a degree of transparency and interpretability by citing the sources.
+
+**First RAG Paper**
+
+RAG was introduced in 2020 by Lewis et al. [1]_ which is an architecture that finetunes both the query encoder (bi-encoder like most embedding models) and the generator (LLM) jointly with only final answer supervision.
+It did not mention document chunking as most of the time, their text length is usually short and also fits into the context length of the embedding models.
+As both the embedding model and the LLM scale up in terms of knowledge and parameters (a 400M LLM was used in the paper), RAG can achieve high performance in a few-shot (prompt engineering) setup without finetuning.
+
+
+However, the flexibility of the RAG also means that it requires careful design and tuning to achieve optimal performance.
+For each use case, we need to answer:
+
+1. What retrieval to use? And how many stages it should be? Do we need a reranker or even LLM to help with the retrieval stages?
+
+2. Which cloud-database can go well with the retrieval strategy and be able to scale?
+
+3. How do I evaluate the performance of the RAG as a whole? And what metrics can help me understand the retrieval stage?
+
+4. Do I need query expansion or any other techniques to improve the retrieval performance?
+
+5. How do I optimize the RAG hyperparameters such as the number of retrieved passages, the size of the chunk, and the overlap between chunks, or even the chunking strategy?
+
+6. Sometimes you need to even create your own customized/finetuned embedding models. How do I do that?
+
+7. How do I auto-optimize the RAG pipeline with In-context learning(ICLs) with zero-shot prompting and few-shot prompting?
+
+8. What about finetuning? How to do it and would it be more token efficient or more effective?
+
+**RAU (Retrieval Augmented Understanding)**
+
+There is also RAU (Retrieval-Augmented Understanding), which applies retrieval augmentation to natural language understanding tasks; see the survey in [4]_.
+
+Designing RAG
+----------------------------------
+
+======================== ============================== =========================================
+RAG Component Techniques Metrics
+======================== ============================== =========================================
+Data Preparation - Text preprocessing
+ - Chunking Strategy
+
+Data Storage - AdalFlow LocalDB
+ - Cloud Database
+ - Postgres + PgVector
+ - qdrant
+ - ...
+
+Embedding - Embedding Fine-tuning
+
+Indexing -
+
+Retrieval - Retrieval Optimization - HIT@K
+ - Query Enhancement - MRR@K
+ - Reranking - MAP@K
+ - NDCG@K
+ - AdalFlow context recall
+ - Ragas context relevancy, precision, recall
+
+Generator - Manual Prompt Engineering - Ragas answer relevancy
+ - Auto Prompt Engineering - ROUGE
+ - LLM Fine-tuning - BLEU
+ - METEOR
+ - F1 Score
+ - BERTScore
+ - AdalFlow AnswerMatchAcc
+ - AdalFlow LLM judge
+ - AdalFlow G-Eval
+ - UniEval
+======================== ============================== =========================================
+
+TODO: turn this into a table with links, so that the other tutorials can be linked together to form a comprehensive playbook.
+- move this in the tutorial section.
+
+For benchmarking datasets and metrics, please refer to :ref:`Evaluation Guideline `.
+Additionally, FlashRAG [3]_ provides more references to RAG datasets and research.
+
+
+Data Preparation Pipeline
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Document Retrieval & Reranking
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+Multi-stage retrieval from the cheapest, fastest, and least accurate to the most expensive, slowest, and most accurate is introduced in :ref:`Retriever `.
+
+
+Query Expansion
+~~~~~~~~~~~~~~~~~~~~~~~
+
+
+References
+------------------------------------------
+.. [1] Retrieval-Augmented Generation for Knowledge-Intensive NLP Tasks: https://arxiv.org/abs/2005.11401
+.. [2] GOVTech Singapore's RAG playbook: https://playbooks.capdev.govtext.gov.sg/improving_rag/
+.. [3] FlashRAG: Python toolkit for the reproduction and development of RAG research: https://github.com/RUC-NLPIR/FlashRAG
+.. [4] RAG and RAU: A Survey on Retrieval-Augmented Language Model in Natural Language Processing: https://github.com/2471023025/RALM_Survey
+.. [5] Ruochen Zhao, Hailin Chen, Weishi Wang, Fangkai Jiao, Xuan Long Do, Chengwei Qin, Bosheng Ding, Xiaobao Guo, Minzhi Li, Xingxuan Li, et al. 2023. Retrieving multimodal information for augmented generation: A survey. arXiv preprint arXiv:2303.10868.
diff --git a/tutorials/retriever/__init__.py b/tutorials/retriever/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/tutorials/retriever/data.py b/tutorials/retriever/data.py
new file mode 100644
index 00000000..aaabafe2
--- /dev/null
+++ b/tutorials/retriever/data.py
@@ -0,0 +1,21 @@
+query_1 = "What are the benefits of renewable energy?" # gt is [0, 3]
+query_2 = "How do solar panels impact the environment?" # gt is [1, 2]
+
+documents = [
+ {
+ "title": "The Impact of Renewable Energy on the Economy",
+ "content": "Renewable energy technologies not only help in reducing greenhouse gas emissions but also contribute significantly to the economy by creating jobs in the manufacturing and installation sectors. The growth in renewable energy usage boosts local economies through increased investment in technology and infrastructure.",
+ },
+ {
+ "title": "Understanding Solar Panels",
+ "content": "Solar panels convert sunlight into electricity by allowing photons, or light particles, to knock electrons free from atoms, generating a flow of electricity. Solar panels are a type of renewable energy technology that has been found to have a significant positive effect on the environment by reducing the reliance on fossil fuels.",
+ },
+ {
+ "title": "Pros and Cons of Solar Energy",
+ "content": "While solar energy offers substantial environmental benefits, such as reducing carbon footprints and pollution, it also has downsides. The production of solar panels can lead to hazardous waste, and large solar farms require significant land, which can disrupt local ecosystems.",
+ },
+ {
+ "title": "Renewable Energy and Its Effects",
+ "content": "Renewable energy sources like wind, solar, and hydro power play a crucial role in combating climate change. They do not produce greenhouse gases during operation, making them essential for sustainable development. However, the initial setup and material sourcing for these technologies can still have environmental impacts.",
+ },
+]
diff --git a/tutorials/retriever/local_db.py b/tutorials/retriever/local_db.py
new file mode 100644
index 00000000..27e57a2d
--- /dev/null
+++ b/tutorials/retriever/local_db.py
@@ -0,0 +1 @@
+# show case the get_items and get_transformed_data methods
From f1c77b43b5f3df86710c91677311eedf634edfe1 Mon Sep 17 00:00:00 2001
From: Nirant
Date: Thu, 12 Sep 2024 22:46:01 +0530
Subject: [PATCH 06/24] Fix url in bibtex to latest repo
---
README.md | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/README.md b/README.md
index 7d9eb63a..049f9d69 100644
--- a/README.md
+++ b/README.md
@@ -238,6 +238,6 @@ Many existing works greatly inspired AdalFlow library! Here is a non-exhaustive
month = {7},
year = {2024},
doi = {10.5281/zenodo.12639531},
- url = {https://github.com/SylphAI-Inc/LightRAG}
+ url = {https://github.com/SylphAI-Inc/AdalFlow}
}
```
From ea7e028e5772ea89d3bc79318c18854de88a2716 Mon Sep 17 00:00:00 2001
From: Li Yin
Date: Fri, 13 Sep 2024 11:25:11 -0700
Subject: [PATCH 07/24] add self RAG in rag design
---
.gitignore | 1 +
.../_static/images/RAG_Enhancements.png | Bin 0 -> 367396 bytes
docs/source/_static/images/replug.png | Bin 0 -> 202051 bytes
docs/source/tutorials/evaluation.rst | 11 +-
docs/source/use_cases/rag_playbook.rst | 135 ++++++++++++++++--
use_cases/.gitignore | 1 +
use_cases/rag/build/rag.py | 9 ++
7 files changed, 146 insertions(+), 11 deletions(-)
create mode 100644 docs/source/_static/images/RAG_Enhancements.png
create mode 100644 docs/source/_static/images/replug.png
diff --git a/.gitignore b/.gitignore
index ac12f685..e74eb2bc 100644
--- a/.gitignore
+++ b/.gitignore
@@ -36,3 +36,4 @@ index.faiss
*.svg
# ignore the softlink to adalflow cache
*.adalflow
+extend/
diff --git a/docs/source/_static/images/RAG_Enhancements.png b/docs/source/_static/images/RAG_Enhancements.png
new file mode 100644
index 0000000000000000000000000000000000000000..946b12a73ccad0fcc3428187e0ba55d6ebf2d836
GIT binary patch
literal 367396
zcmeGEXH-3)C0RoH;`zCwpJ*
z%$W<2GiPuq@NmJE5-Qo};2$hUHJN*7in^~ZojF5yM(+My4L8G;iSu3>nxn0o%wlN)
zFZ{`IZp*d?e(-0RW}~>=soM+d@uXx;W7^L5FDR-vrZgp|xcrh_B?0fzBm1Uvlo!vb
zGrLQ`za?*Wn=m;t$IE3Otu4&W*gC9LFL=SLUEL;5MFom-b8}?{NhQx<;rz278}y_y
z8&P8ZXRz_;Ui_0Ek7Zs==g~5b{7#)uC$k~7Xtx6m`$mo;w
zi~o2b&NQj-{NpwF-}3#REA+qR`!C+~zuotL`OE)y-+u|n{|lb~M;QNq|Jj#D!NT%9
zIHN@60zqhINv>>2NXR{JxdR!H>BuvZQc^P8+uz*W+>9W-&ixvHGhJe)gnRn=p&LE79o^BmFpFQT6}A&?
z=7aY+TT)k^=Nsp{EO`6ZSnTs@{lj(ian20*nORH<_e8(#%j)vBg!~bbHC~=+>JL#_
zNchi~B>9G5c6MIQFF-!$b^}qIIJPP~>2*rx=svY{7au<*4q(1*lJnSjFB>JjyplBJ
z=l01@*uUWYVGF+yaymEN(i45!|5yOhU?D7`B&&_6v*5pi&HQFF@ApcS`dlV|eHGsQ
z+8KBLo#cJ|Oa?~zOYq`gU0&SKKgJzZ1;)KavEcDv$E~vZ*Rs<|%{RJoNSZPkA@*G&jKuKMzG6vvr
zEhgSwJR$(KXb%MkdKvd|KU47&C#LwQS}kah>-i7
zix?Ls{nf~nly`^#eyA}^%62sBHIn)%_5~K53L6&0DC^10pKd^B25#KarN*oJ$5eiJ
zN9y-;ZgUP_im~W{|89}>^yCFrJwdZ@utiLjJ3e(j=%VoprjV7FkBZ9t91J-d@NZiL
zFB#akXQYq!-{%6R_995q(8q7_Ix%=E9R2wMVy!*hjHHk6&uM2buG7zdoo8t_)RpTx(dgSKTDono
zQ{@7^D)?M7U%QyA-edQD?OH>QhHbBtnu|+i-9blZUzw)0Ec
zP5sk2%D($^lvF}b;;PTDt}!F8)y>7|na<|+Mnoj&X4_co#>Yb@H4>7>?$B3C)F
z(q^uMT|}W%3(6oQB$T>;dW168IfnaoHQ2WgGuQja&dO
z(SuW4im=xADZY17YPFB|t%Oiryv_E(W)E?cn_VogT=^p+kWzq%(ENnz3eK;~k`?%~
zbCkg+-~Ng*23SNQeVz@|zIbBU
zbdO3fgrar5@zhZI-BsgHibGF5h6tC2R!1%3&GP0qr%q2;p0>VuPee1+Sq`tKgN{|$
zqop>san3dM4--Ok?Q+e&CUoZZK7IJHPmh#`Iv|HQNJ`6V!;izTu7~^%8Xc~h(j*ns
z@ru%LR)8hnh{b)my|F!%Ta;Vo-8Ih5>Os`x;jG{J>pVu6@p7b2yLp~1lFG>`z1JyU
zXwxC0e%Ze<-g0k)?^$f1-=9#4@ePnzam4uD{T&+bhe{e7zn`6*dwml5!z0gNk;?|Z
ziyeq!XfzIa)O*2fLxcTwvBBZ%hY5mB8Y2B0Ek?GDQo^olH(}O@n7uu>-irSZX}<6z$eR8Ost&u89L7Kp-|7a^w85$zrB1#NK5t!vSeiCFlPxWHX55dd%yY*7kf&cEz}g)~{8vRo2Ck<=w)ag?@ns
zBv*&Ixf{15@;(*b7*t9w3g0B-{j`ia3583<)~?kVB=8BvPUk_BmFWLG`Td@QGnBfB
zz4^oM)8?j9D#)$N9Jjho!(04*oGC9f>BPmA`qb1JM2+@z8?4E@rt<;?wg)?@h3j`}
zkfw_z-a0_uk%dqIU@VX
zw@DJdtbG#c<9B;g_&CZL0&X6ioK+odsu_%o{DVUaN}&{-rn+WYrP>7|DYw5}xlg!#
z4Y9G%uPnyuXa?tjcVGqJc77F@`i|B%52dE@Is=O@7)M);_=*
z`IS<3KrZDARJUs9AnO{OJIi9Q-{<)J9%02nV`#vb|Fq?Ev88`I{8PV!EJ-U#Ok4U_
z6%fqhw=ynjFL)v2nbyPeKjP;j?ANdHBNA2&$vGc+%cw~-U4Wb%Ad9oNMncH14;NWv
z8%NEp*E3U*wI<42)UnmiCX-oAvqbC|(>-+#{B-oH3ww!IceG6AsF(q4P9lh~Ybz%3
zOy51*wGgI-K0+FuvTerXLwYZZ!hCL+Y=oz!7uW7;HVrIgeCyZYHE&bkZkR5=u!!t5
zjI@9ECR*5eIS#%S%2o5>bE5Lu5@im9n*{T4GUy!<^EEG_VjbR7foDP$Ah{-P)BiIG
zl1$YH6OJL^r36XNPxlCR@xu<1a&SOpcqerI$RJYRU|Z(3N081G&l78HXc)mnJZACo
zH1Z<4;`sHtB^vjTq4k`x4cLN?2Yl{x(Xm+uwF)N7+YiZ1dYcbKpkYG=uGfp%*SLdo
zu@GXOE}h8EnD!cHYeh|2q6Q0CEsbrRDp%dc&EmnSYo1P}9QwDWURvqGo-u|}az{j8
z_hxZk9*SO@u;UxlfqhTh;e9&KA%w2zuM1nxN%4ueogv+EfKKV?V}?Yz#OkZ%C)3F0
z_Dl`iRim}?bYTgd?x^{%WnTNWWu~1JGZ``-9_rPFQs=9)^#m+?QjJUHU52dnz0v$-
zmNU}s`&pEx&4K87*w=##NQu*ZtLn7_nmVnr!ThRPm(_@>Nv~!Ym#%TKJBq?#DTRv{
z;kLCc7NeCG)0VcUT_uh(bU>O%;m$b9fUy-e4_@hB{%jn;M2
zG(Vb|nfY*aH_@))eV#g#TxSfYqULrW5q!r}iH^vrA;qh%#VhU0zK}*As#917j&$?6
zD1CaZ^i<;Xy}+cKfw*CLi)bX8%6aq4YUZJ$!2XfveY%;d{yVpC7^5VOg{+W)S
z!R}=Shu6VS6xHuDP2v2jzhg;=@tiX39dKTG$k7;Cc1?1X)Y)6~(r=;?
zU{=+~qf|~u$YYYYTjL-TF=#}Tyv1GO^D6h;<%$=v^V1)xE=QasqfmrHqqqmBfm!&F
z1L|5{mhI!+sYJS1tux!s79XUIGQ0*;id_igguPqdrqD^8>}7BmY<)m*2|L%^Yfg>`Z7x1Ek~arKqmWnt50rzN-j
zIon2&EfhQ=I+}I8?l=a%X)UQ8Fiefxzx=8R53)91t)8uz%%OV>ANz*npU|puM^U!P
zT=$-JT(L@DqaJm+M$MEY;2Ne)Z%#vmQi2Hcv)twP&-q3v3d5xmlL{NE|6smrtDeVq
zpCTiVr4$ogvzUQG
zo&KaEc(5Uf*ki~90R<-o5lm}44Q_RODbz>kGXkYhFgEEiW>XYW4C>Vbcl4G5cY9rR
zxwntu(t@8>)Zl^#3yFVD+CY}wALlCF7O3H*PpEm${<`dUfr5qYk`N?Wy(wdQ>qo`+
zo(_XqMfa%7_IRK029eCBBe^EZVK-Nk(i^th@vFC=mjcRke_rVUYu?P(2Y4O+JgetZaJqf{I;g0>aoj8sbiSj@)ta5
z4%?YiZZ@7A6g<2tp`=x~_pyTN*7NW@+q}Edu&EE08C~)GojBI~M?y9pF#`qKaDIlI
zcUp{t_4$rSpP9GW>5#Q4l1U>jUUQRpmFz%4GZQ(94|;cf`KNaC$@Ew+i>
z<6R!}?q_6lTNFc_tQOC_A5`p!J@0u;7B|R`n6gZHC$^KD_aYyWbjvuM{rR&AJK^?%
z;_8+a8KRHZME8v3XL_
zo`(S$yW{=+u$UnqXYR#EXsVuS1#R^#jolFfxCFDZ*uH?HPfKRI!;GSs{!^ERvN6X7
zkJGi`QYo|UHokcQr*$rpceD#IkD>|lx0Q~97Z?urN+p$E=U3vtejTBew=(z?YP=A^
zrlz;)rO7;K6+wMJ<$biy3Zw*5Y_mOCrgeu?@r(MZN^$v25qBP#l42T
zVl3y-plC>3fPw?i{JpsEq}PnI`%$S(R%)sw2rCwlk)g&6!IY%~!UTTLusn`cQ71bq
z*fRc1>+8UE^!f$Yar>M+uOj#%iNsORzURvmF=>gJb>Hc`+9mv#$(Y=zFK8az|CM)O
zs-kqjCR=V7GV=aTM{?<~g6IV`z1S@F^eHeoN@~(MwybPfue#%K6PBQyrZT(6;~^Zz
zZHd@c)zg9gOr?p)PA69D7p*9Cg^Tdeei*gTC!9^bfi&`UH8r)4tD=M2>DJDoMB*L<
ziHpO9ej@PvtnSBIfr|5-k1P!HknTsQl!8p(De-yqGOt;8gu1fGSLo}81lPTiw0Y+h
z#`=4sVd|FtIgD>7s7?k*m%DVz=aQHdLx`G3q|XK%9JmwF3zJyia{cs~P#tTCdY;uu
z&}8%bgX~wKuAl$b0;nyS(h7HMcX4Qc>~k*)e8WF`B_({?#m1Wv^>Xu_>{n`j(~qf5
zeIM+fBGwfyK&&=o3woB>Yjc#~eAe?+{*x8<8E>e>`fy$q&Q#(KZ~g&6QqE$E
zJ226Xm&0Obqg{$jy@zzz6NOWz4!*o7q~Z
zx6A$TM0MbaPDSkH_kKAji;Uk9cF^p6wDO~zt2tk?ew0o37{U`G7+zmXt0FbX|7`n?
zR)opO8y%hJ_6Q<#3*Atrc%JwMsT
z3p!T=vsR2YBf~R>&cnOb_((7q%#muzu~0?VdC#
z9*57jM)pR`4W&K~`P{J?Wf5C%=v>w89g@^cX*ljeeitjqvbx?K-WP8US<-?Ij5hYS
zI4TQ{;grQdP~5EH1_{%7j;DtNIhL}Ay;P?fu?*zj1bYwZ!MfM@N$V3lLp+sg@P-<7)rOsA4NQ*Yuppe`^pg5`WL?5GIN2)aw>)FLi1va*(b3M?un0eBYi`W^Lfw)_WJWEu5u@=B(V>Ju-6yqC171!aGHrFCD}24f8J^
zUVz|!e>zd#jd(f+EzwhR7G2OkT1}R0$)6E+Ir?yPIOS=IC^Fa@2_!O847)C2xI+F7
zWd9xnFAqkqcWb-qoi%rv@G@A~_}0v0el_J(pE1yTTyE_`qr?-(Ws1R^#@7_QJ`%^ln6g@8PNdk6qKf>b>tS
z+3e=94p5AauPz*j_DR%IJxIa`wJ4*ptcwrKVCc_us?@Mto?FHa~G$V=D#K~GBdY+-C8M!$EYg_W$Al4
zq6ioo-$Z|cVnHI<9vmcu^D_uP?;-8Yf9)|$PY}JM)s*GAI7;x=;Ae)EuM~+-DEBirZZ5%@D8lrxG0uwRkYh
zU!aYtcIXoY?ATl;S{9P%y+H}Cq^kQV0(#u6CZzmgs;R`q8E{@oD5-gULLT2+gQ=mFIMZ?
z2@s{(!}Z44y#DM?XT;R>GA}5(yRBLd7-JZfMOgtPPG~V9;pSUSNF)5&O|Rg!@Y%(K
zHxMz;B0h`ZTd6()Bg55Ne%o>ze59PCfAZyc_W_&LFRRP>FC;N(3z)*x3~UAk^o{m)=ZUe$Wy{YvEUdXxukD?l@P6N{|E0
zL6&_+%hqQyg*lkE(d&?qoS}M^M~_%ekJo)4G}l`re4gafi8S}r2rP6Zb1(2)Q*6{f
z69ee+9n>a0tL4r!-43U#N$-aJr(Y%4SP#a&Q)ILD{0K*I2oQ%
z$Z}4+^96D{3wl{Mn`n7qR#NY;_N4;}sac3+{Nfw0=_(d`ps9HP=0`TofxHOv
zps8V4+4PaSL5jKd@?Jg;YJxKw3G(RMV`lI(rwYlj*MP9mbzh3}u;>QZ}
z;KO0N#&(c{&s}HN!eMkzYc1XIzB$y
z@>APTNLyBY3eaM>ZfG>Wc`E%j3XPQDH@Yuat4fdIwH8r!C!Zel)wvT?WpOKRXq=tw
z=EimNS$mXn`(*8P-{y5@p-M3$#%2UHI!rCON1s^GHn{TKM2-5Z?3QgP>y(n#!p)
z*|ojLxS3IC#FyN7VPV#F%A0*Cq_PrsHlue4syEkkxE?7V%~q{xN|dQ=1b0HlT8T;o
z6&_pHx;vV8?%0fFb!uw2)4GmRwauNiN{l1r&bzH&Dn!RYP5-AZh6NyCxRlAPzc1rx
z13&nQodd8q(lc(5*&X3;rX%(*gbt(Zv+iLWk;^JEfw#>5!XmX-}bKvQo
zb9u}u;qVhQ_Y%T4wcrz!Ioy$$x*_S-ZK-^XyeA15N7%h5XUsA|(|sXiNZ!X~C(%$s
z17vk^$cDyAJO3PfeEcsvZ($*6lTy^v4%Zl0FB+lBA-t}W*wtl@=Ea_a)OZzIkP75H
zCx~e&*L_!1_fHOeI~RLNI)xMyF4tDVPCeHT3+!1HqVI#GmsS)unNm}wS7l2tg-gQM
zwD;ysJBIwJ5>rcuGZb|KXo1H7!iR}F?YGt
z?+WNV$i3!XbTMKMV3td{xzkGkqBU9QPUdRYg;~jYcz9HI;sWEJt-{r>ajBP=4x{FD
zRu&)OXC}h)-uosoTiSY!e-KH1M^uma`bmm=bS17T%qju)JzMLAm(%7=3^}%U;hpX1
z2i2B0*DUYxA5a_Do5@?{SI-9$Ax$5j`Y`G90c5cnFi#JoHId&4o9j^BV}03*dSqSp
z4NAknV;oD9nnnRoB>jDcY<9wR@(%gosDXo=$`F5CQ$IMN+e^{{zOqMp*ZYs4VC#Up
z(UqzZvu*fN7;m1X2_493VuqesrkX^Pi)zkl7HD-WzNdeP7V%cx{X$vAPLT^+qq1J?
zvVFmWah2%F^Vhso4tB4zY+h?ppC|@sQf3}*!H>!>KRDKTk!6PYVR)pnc-FA@_qrl;
zi>{)?s_8uQIP}T+YaA3I>an0g=U|sOC>aP1H()oPHjsLklk8K2FL~gh
zbncQU=TCM`M<;%scM${LFZGv?i|1?~bk|c8HK`MAogA+#CoWjdbwp#30|(UCs=Y#$
z{FGRQSW(-1>7zd5d#}?|EQcN!QPYaJJj_;#=fKp~6&Af}EpKSq7m^WONXjb%-us^G
z9hq-l&!Zj%ASc(*eO@ZM0Ub4l2j?iDCadm8>%OPFMz}%;ofr2}gNU|O*YTx;LrFQf
zlXcIO-gE6!$kt@-dfWx%GHKz3g$52lEt$nWC;pYj{mAY_K$7s(92(F1>#}4pAfW>B
zVr+pP%#Vc}xK$qRsn6cG^Rdg|K;tq`vX`!AKhNuyk3~p?R!kC{eCe@GzUEtfOuRH+
z9Xe2CF!oZEk72vm*Cu?V+$^NQMf+V0r(QycWloBr*U8XEu9j78&O&`U`4C@S?p;2j
z(rXefT^h67gb*uPyKnycO!qUesEMkU*`Ff^pzOj!XAgHf2>|nrNl|!-;UNWnxXMs0
z@>Td4ds1iF$0k6hH>Xe!QHeU`<~_=Z)^UDYxjxK=f%kOEJz&;tz^AN&hC%|UIqHvh
zL^4nEauRwXaNkb>_>4?==*-d^fEXR2PIGbf)iwsGW6)j(_0I6zEOS%CTC5J0bKqHQ
zM6SeF1lw?m_@fGn9VQPaT(0+5it{AMpr;DkP*d9C)n6-7d2k)3Th%ev4nf
zECDN9R1P9;N5Lh`KlxHqqX>;goKX~_gNM&YtcyEdZq9Ux9RR3z_JF1)(W?ERRw{_N
z1EjFlpDLR!U|kX=I-Snre^%&e(3pA4b4}E8AXl|iCfE7Y!2U}#V55CmwTy*D4qX8$
zUR8KJOEyxse?QB)qcpc(#Axr1io0M4csID$4Eyw@*DZ|c8m+4Ka7w!aCRnneNCRun=rU5@<+NvcL!kpE%P5y@!;BIt9h5Qy!Z&>&X
z3rr43@kOPdj_4RDNwGHoRF7sVk
z)Ta?aY>@X2sFdLPz(|Ph?bnM8P9O
zDuHZn`A-L7ZnN)$9u$9D?WxZ%PDI=qNPk5Tvg&S~I_%znTZASbP}5)lilBJdHesQa
z9`vKA&GPJ}V(%%V^;n45y0a$v5Nf5yod6=d-s|qT5cfE#SgP4XH@VXLP@!H+gK!pd
z(~bHBgh1tq94fMw#H{)>Pgi%hl@Cv%-;3?0On9fJwe&u1k?+q|tcN@yA{<@qAL-VX
zYy_#O@ExddzR^LPfO!~xOpFts^^iU;@wI0SZ>Dt^$5EnChWy=inqqGDQe|%!%V<@o
zQ~v1)ip5&CpV}YIPihcgKf^2w1VQ5okJ!0IeACkOprX(qAH({iJ_R1TwP(dC+QKP8
z9_Ty9le4oX+Eb>aPKjIq&NUV>h
z^K2=Js=W?ZtG1dBI~BrSsKYBQ5R%O&M~Uo6aCgv#gcWO>&e
z^@XOBs}NkckO`>T?6OBeVYG9dv?8-bB6hpfqRwGI(1D>uyEeeB8{AcZR(ROgDr9=@
z(mW^_aBq+wRx0U34L#Sg7?e*&QF2Ahf7QcTUBh?T&^TY|d+OuvJz~AJnZUm_s4b#M
zCu_qWH&U%YnWvf&xL@Cp{>b-^<<{jqEoee~ByCE>y##8=cEp@qEC#~$gq7nqQE=h$K3^f-e7Ek9n*BknJ$LGl|5lSKJL|j%F
z5Kc%lHrn$uU;(I_)SS#Y+T$v4RZSgjM{SY7!--B*~q`P=}9^f@K4SJJd)?rt|B^IVSW#n);JP
zTswM=eBMum9W!+rdTIjk2|FMkZ4GN7F)2
z<4)+&pZ&JX_}HTwx#o$%spst5gX?gc`h)fF;Ql%Es6;Syhm84qzfIfA2K-J}JH{Pz
zCZ=7abCchw;1KED7q&3J4)luemq1lBpsviRP?ZD}{5LQi6JWG|t0$}n(V9p=Ud5sk
z_z8NIQuT0dDg)ckLaFHg#PvHc)Q!89Bj+#g_shKu?iJnHk%=i>!hh(ebz{B)+(|wj
zFj-gk_sy5avkJDCC-eY>XsA;&tB*s9mj)&P(yg_lEd?rxwTmNF>z>uyeG=_wNyHcy
zAc@yLB}*dk?n5vuq{l1$=I=H8%g=$zx_@h6UibIxRawF8*>Rdpz&_t4HEuwF@D&-y=%dhqB=VQJIg-AD{ej=QG&i;MQgQ
zzi&*+k^zSQ6O1N(`eUfv&x3tj954%7O7}zP(_Tt5&i1n}!ZyidXUrE*~I?=0xH>Y%ej0$PzIP4F3*8ADE9&&>?4
z3zdp`&xc8VzF=Wt*qrl|i~1e3+<@Do7MkK@z9Opd}4Gv02+?Awu{Xs6QeM$?|=Yg
z!=M&GcHY}^ZF?W|Np&=y!0tLYILvN!ibU6(jwfAhPZC!&bf243+x;e5Di`T!I
zpcQ~?y4GoQe&xFFy(95U=lq?G&U>!mC_mUDxSF1lf-$WmAHgDfK3a1S^b=8#x36o|
zgrRLd9??If6Jk~U(X{m2@|aHz7{3wtPrnIlS~}cak9|Nu**0cZ&)Q#ZCR=5Ti1E4ta6^|CXTW)>tN5p-&%m+6G$X8HSs)VLDMxQ3(i>$N)q2Ty3@l=
z`y32!k6`g>ff^gUc*BTQI{1$uqfrPvHUzz}{M)rv=}5Uw>DkEH&rUrA;@GWacJ9uZ
zcVcWn#LZ7jDw_4gc3R{$?R@H>>%;<_4C*eBB9T8E?m(y>=&DO$LI$
z+5Q#g99;S6wf$7a5lt~*ZG$ZQnr{eI?>8}JOJMfGJ;2qGF|xkb*6`%Ir>goM^zW#z
z*4%Strj$uF!McZ9A^p>9uxf$3ugdrS-dle)=+L`bzr2|m^SdWu#ow}TRzDtSQ<4T#
z?+!mvy)A&zr=MdX4LKD4cw^5-X=5Ncx1>aNYYUb2QFRd!2KtF!ND2~wmMsMbwR8rC
z&q1hL>|W2R-OoiBsi=ILd3*+sZX@W0yALCC$S>{s@6lnPja8Q3-^}D0ze~Vfq}O)v
z9!S0gXVuzy>1(vuX&;IMI05-ti?RLHuzO^ZoE!`p#&3#l=&oK92#=`Nx`4Pp=^2X_RoW)rGGsYX@3_O^^emMy#ho39~
z#G~NZ2WtF&j1N%OhNG)Q*zOVdwqRyxbkeG!7on+smO&Rw?sK8tXiG?_3Bam#3
z+DsTMb4F_A=~tIk*iJ}epnRvrVbzGpNQ3^)?(RFty_3UE)BcvXMU?MQUg79H8_
zi&WU6jW?erUt#>UIWWsYIuBf=*~{g21Io`LvzWhS>!8v8A
zQXgv3WMNMAA)l?+CZBcGs-~BsEc;EG4~VuFzrSK?TVCyvTnfHOkKuxu
zqJ(WaOj1xanckkDtQSl{<{qE77H=|aRhdHg2-v(J0_Q!RwBzqB^&g+`*fD*3u&ac>
zcNt*&({H$e?g;)5+2mx$v>4o#Dw0V7h*u{PJP0D38#kHu5rOE+Pl1@u1S;WYZlxjl
z7_-fw5#j?_un>@0Ssjm7iWzlV4|U-%d!M68J~5~@WCXMYhMgRbxgU_!IpwL$!xXkV
z7KHwMfJP?vAonTtr(ILdJw5TbNQ=buOtnYy$P>OErOK)7T}}ropNBDA?&DK7rar|t
zR(@hxOn_CeU2wxyMh@iBW5BUIx|Q`s_x1X|A_LEOLF1$;&t8NuuBZ6qT7OyL)7*7y
z-xj|U$h2kZlZrp_`F}ZrK=$_>lPxP1G(Gx#`KB5jwSSJ^C+m&vmPC&a$@84D6c&xT
zVq0M%>kb9imbhh?-HCixrD#_$s)Bx=BK_V^vyv;(Kch1xWW&s
zfA(P6Mn4ZIh1Y#GI=?#?M$6;YbEfgzL-=cbx84Ul-arc~wr_$c=KFkbO)_fVY+g&?(m6P*J|i1EoNHr
zCx9)={NQ7#7Tz^#2NDr46dF-x(R)v4zcMTv<6pv8W_!zB@vjR}yy)pvo
zNHaFEyB*ae6(|q-gOCdt__)0zX>F)zp?T+^ABpU)6xrO8_fYS9qr%vlm~79eVka#I)_d<e06rPks#(E?{%{MS4PglHHCv^4cJMQQqo6!$G^ESl?%c!5QM5vhEA;}e%U5SlU
zhDX?0NQq;OYadRU9PRG=A344Cvwq7DSR|D)OWrx{a4G3ZFn9mH=HVlbQE
z__#iqgVSXSIoPr<&MkJU2$!><`^Kn&&3o-HwFHMq<53?!OwvDR(y(!D#R9an@BoQO
z)tMA?e$85x!w!|~p$g6b8=D(-J{mQ+o+cG!G8=4Mwpk9BAE$I#KW}I22Pz6-U%O}>
z@6`!3WO<9dC}#&x6d{H$+GPvr#4zqcqg_2Zx-Kn1gO%FUV^*?)YZ3&d1f
zJx_Qwe>tq*G#;jC0Lqs8>@LfyrxpEKO34z2=dL9c;7o>2e9WqH0m7O04??f=VhO`D
zIF2i=CUncpx-NNt`B2rYWmk<6pEBt%yJM!X4{R5!kGcmYAt5(r7x8F2wSgiAs){ieaoH_Z52Us}Rq;24!E^a&-)ev^R)=Mgirr2=ohTDes>(xf&i&~w6-d7%*<;wuXE#Ojy
zmk!taq|OZPq~Ya`K*y;*RytZQf2G$zQoiwl1|Sd}rzLhNUX`1?`_GzjM=EU!KyU9y
z=2sKbmO8CV&~)=!msr`MH}PuaTF@7<9eNwQJxZbl7d*!bjt@w}0TOi?IXZ?G3n=s(
z=T3`L&fT%kT|-JD;QxOHel2WF>tEke>C@Kn7IMIVLY;&ihpr0Eyx|RrA0zVduwGza
znXd~@4$q!IiX0MVdRKr~Ho$xsUvZzcAM#vSt(uIAHS|zzm@IGI@wO3g(;eDB&qFB^
zK=MGPf6Oph*rrC`^_b{O;N`rg{<5MBvYVH8Vad)UnM43Kt+4p>1rvW3h~EH)!Z)t<
za`X4)m(Y)~Mm|egOB&~OYr(;yeX8+VS+;P)evAk{VzZDGKCv@7Ix#R~MkO|HET;(k
zoGfHIuL|MssrAl@nCUVWzlbASr&($3wWdcBMz*6>6ZSx%|0r7t!oaTOrSfdloZo_u
z(r8?1Tlg&ve+)C^5nFNnK}bRJkJYhs<#Itl>-=+m)}$0PTA8@Yoi$-;b;YYi^%E{a
zBaI4%wqX-oJg4e1<9b-a4Db{Qd*e
z%RLCc^@&=_J
zXq~`Mb#Kv>raAV`6MkeH3&PKT$qMlLa
ze>7w{82q>s{7ni|o_)yfotf7&!F0;U#LVB}ueD9PVs2|&)YaAjT-~dQKN~~N&SPY0
ze3svx^B}$_LiZmtXnB8&!E
zlp3cuohScrQ@=_V^JV0fj>8&_P_0jL09xdf$6yEl<{H0#Re^w-r&B%A`nMn5&-^Gb
zbHZt5h(){Vx^9#q4PDT~OI&{}#Qul{hh%&bpK&O7p#pa-P&>dQny<(Frbz7-^
zn%CD?w~QyJ*8Xs`7K9KUQ-{F1{T3SA)!3iX!+kWu$P}p302ek`G-WNxZ$1A|Xn$5+
z9=7ebGWhts@?S^zcibnM`CssUO#g)}<^QkgV=#L5HxPo~3ab15AAjm*iqeL1Ri(N`
z)wir{}5GXSKO7ifuf>fGLo?ts)DRq@>t?ztZM@+D4Hb>rp%kIu=YXJ59B9)=a;
z1KZLHQ8Y8e6@JSUNHan3XW2<4<0mZ$IkeGpYo4$?APA5s|i6dis>^Y%RU4<*`)W=KGQREFQ4QW6)e}+&C~@z@
z*IJdDh!kSlOWrp14fB(_`&wiC;tN;f|B~1C%w5m_m|o@FAL;@C2#?(*_N67ouUCNZ
z;RfiN3kei~$`ns}X980H(&=fMn%y@I(2{$|T{Xo8vr1;r6>^@Fx!wU}kQ4U`M~lr%
zg#WUxFa(pjBXge;fh_6}>v8#wCiO%WzTeU5PwN5``;}lwgqY4xslOAcPj{y=I(9&(
ze&WAO^cGaW`NKVU5C|PBtDBXpSdO__JO4ycG|(beso#0_h#LrI*eM=Xj1?D8FZ~oB
zqDIz1I|3_)uW?M^EA?dEB!65n;V07!2(REk>SUQ*Y|pesZKK|kw&5Vf4`mgtn739?
z%MzoHV7_+WymYjm(HRf$$Xl2#+ZLd#fz(iRndK%G7Q^((Dl#hD3
zS*uSKZ-p>l|D3l}-v3Q-b7jD4>i}==KkNg16OY@E?3C-_>1PH1eOKenFDI_O8`P!i
zAftL475yaX_uil5!W*zwv~A}p)NcA%q0-^Tl#G8(r0Q#^_uDK_8eojQXDIZ@rz
zfC4!)YlckN1`YMlV^_c}$Ft?tx~@CiR)#0sn5Z#89;~Mka#+be1s)=&HDB?BT-F;}
zD^B)9ro;Uq^-(0f#3=`ml5KU5+3kxOJV?yM#qJvF1j^b(htsoDrr0g-1
z^3VB_cHt^t+EOWRg=3NmBm#%~N%kbnVvzK~gYqeYzGb_a8;FuQ(48{Th
zz@0~>-_N$k-7*ZjCbomzGpQ9K}D^eNzVeM-I
zf2?XoD@HovaTl*;(cP;p{!bn%ugr
z;cWvOMn$9vs3nHR-knX`_bCLNQe5}
zW~f!O
z2ZxryIN!#_-2g&*<=A=>%5muP#8GAp2i-AKgolLPXj#Dtv^)s-qkwS8vNBFi=Jmsi
zfqD$H=eG*$5p>*~aW3~?4;ikucLBaVnTAU`0d+2`O1*0GWdK7`P>`^NC6Ew&+d-1J(3@+Oq$9znx76`1aFB
zQhe8!Z`=bt>bMt&{xXWZB-p!;8R_ojQ*<3ND2}Hf1P|!SFeOws!*WtII
z?f=d`>ur@6v&3PBTM7ex^t&x3ZW+L<`f6N|!=JS1&nG|EzkEIy1qg}jT&aG4R`)yo
z5J0f@0Dx8)DB(2n*nTj7(d{bO+ua}(*VJ~tIH!>M1Qh@FF5LINtLwDy&*UDzo2&5X
zNzg*QOVn)B^v!q6#<@x@8|-E)e-`gQkIu4EjpRO>emyfEJY^?|K4bCc)V%2xIMWgZ
z3@40?vFhObFLN4=l{^!ab;BFLGi1-%Ab{+r{ch=AxTxZJrXlC}c-?8%z?1YZqM~9{
za;6Vn6=@xw{bT<0zry4TWFYtZM@_!bpf}*_!*uCi;qlb+WW6aM_Xbq+8-XO*Zs@c5
z%irr8MqOXszt{IHuAwgoNK=2!-doxbf*(Dbtq|yP5Qk68Y^9nzhH>`Cr3L)6jDNDN
zK*6)!0_M25q9y{!0sf~I5cgfPmPj_&t%ZkJOkZDL1~_!CM}~kRhCL{Yq1dYO=HcUa
zQx)=QHFJg-0~2GfaGvEl4EzkP@$>s1Soviqlp2=mM$G-MANu=G@N0pn>B0RMkkOam
z5|3t>oBg>MvwUxollf8t_A7D4FJHdw1s)hZ0kQ69W%*X$|D-xNDp;O6D?6*x^R_ma
zS?(~yjk7B{?05J!{WJ;z!V|J9yhXji{_Lc`ADyK+oOA0zbo`sMG@R@5M?nLjTJ-ek
ze-1@8Ul6+m}yhr@DTV4Ks3CT
zIsLz6Gt`$~zC$&DSZEY-{Aa}jhv=Gz<03f4!_iw-^{5KO~
z>@4UeN&jXXsg}D*p8Dte{e!x`kOj}aEw_Hn^v`pbX^x#ddD0S;>W}pG7d{~JbPGNK
z^D?B^an$bhkG~DppDF$3ZorQpJ$n;)H3AR9j8UnikpH_*{|_?D+2F%e`ze!Z&r0jU
zKy5nI2sm;^9)6^wF7h+Lv;m}$6`KElZw%J^{Sn$T71pqNBm$W61oEBuuq?P8li&6=Z=7yJKw
z?>!Db(xK~0?}-6>=2stHdvX5V>7e`P&$get;1=mHWMcA7LF+;EZC736zFdb+^Nw|~
zLswH%OU@rVbemb}ApPoT`t6kY6gMm2GC*L<-))R|DUUyL-n#DjJ!VoC*WGfe~E(b
z-*mth()Cw?Q@J-s`$pHvqndKs>$iU|=D90?MD&jB5MUxE
zUnk|4fQa;;3nQ#%26j2?LD`hzS+1?yf$z$dan`2&K<@vVWc5Beb;W{W60w?T|PKvEe_xhpyg_)A%&?I
zb)JxgkFFR0o+m)&(5P&|!RsdG^jfLl-fu1GlI1h0@{Hj(sq&t9B0D-F%`Ugs
z4Yi!Uk#ZkUHbm`uzm{TO-bh}qe7(4PJ8Sc+ci96SgNv&Ze0`Z3Li69=@=Hv(mjFoh
zFRWWauB+-`(ao_+>*83c-nzHEDI~X~@>SLbL@hUtu`ufErZiWy^eFzS6yxV5&tDJk
zwU#c5Sb=U=gk?#cJo#XM@*u`s(OInJtA2{9Rj*hoT6t}I)&&LAeW>Gv&u~OSElgyT
zizj`?t!2k+VgtK;=A8&KM2$h2Rl|Wkdo3w21T8(WFjURDezn)Q7
zj=Sz{KY72v5c8|IjX$m)=2oz95J@QZC2&^~Vy{G}#HCy~RDVFOC(UNE?vuLr%hH;D
zz2ZT?a##}|%v>z4i;g~D>DOT5dVnCQ$g&GUDg80ReuHXK_|iU}kfJ?VCQaplpsbe=
z)Cl=@Ju>S%aLmiN9Zb_YBp02jmQyjQqw|a>TA5Rl04?A^Zo`x#F5OB&R!RQ6tSUQd
zNUo!C1X+u2nwj(uF2lZB)kM>j4)LMe{?J^CMLB}O`)&LM<
z|2cY}p4|qhtQBssAW%#KV2dMVPOLq#|6bPY+^L+-XO*Bg`0M~l#@_(_dR1?~ubke}
zzU}%Vc{Xd{M7x=%i{JxNCF{#apRQkz`@vT}!6jzj|H5vZBKz#M-R-+~ZQN7D+(Kpd
zhtjRBE3kmhO7)41$@+*AZt>L
z;hGn~d!VrmJ0{!xJjkYWsM
zN(j6#iq|H;JCXSaV!^zTw1EH1MibgGaEi&
z$`X6zjO69C5uJ0g11IX#f{pSM?uh*sFNmmTGF$@iTxhGKe96drSRrMyEGF~h1
z5=yMwZeK80W>BK))2QFcV0Y(wb^V{YwuOX<4<2R@;u;
zn3(_2^X^u1ear)daKf%`>CDN7KKBV$3q6VY`2^sg;1KRzrKESYjzQ&eLgcX``?SY-
z^bLLb7+KuEtZpthA7)W7sC2b5E7%=Dg9=Wmh9M~HT+BMvW#MxA?-f~S5JA}3*rS5WAQ~;59yUh_#Or9C6@K2K~m|eEp(3qGM^QRA?dIBIs#&C4D#X>SQF7
z@VEhHN61R|?Fl4~&u9JHywz}T#-nn$iPZFS`q>Sh!Z#pBu^xc^@Pj@I$_1ippdGDK
zdP%3eQK!to;TD&ISo}^S)doEI|dz(C^1v(sU8PiC*4qhq0
zwzk=w`IH#oB-vML9>b;n$t#8PS8l6`CfWYiGSGt>#R)%>+|E=m$?6-U
zX*@8X)KLZJ68{a9dvVfOxpN4!Z+^nPAgmKaw_1(~tFQ~~czyvBq?oQ)ULnFmTp_S5
zKG@G{J8=WW)Z3dISC&RJa!Sui@f9}9#rrSiP-fdpIwS466Q=A2j%ng&6e|#utNXxR
zm)~b@G3!?{kj;`Ykx@1&h{zs`0HD%rC1YdU&gGwK@f*;{Hn0)5TuMc
z*V8oXm1FPR6Nw#?VDDhUd8T@?90a$_jnx?MkjGE8Gfh^yF-nog+|lY;w(GX|F{K0gO>N1!NCuc#XGT+S#z(DTC*x`N5~nY_u;cde9lMsulPw~6QVY9ZlCkgbY;UPu-?I?Pw7=jR?B^_O
znfxd1ER`15aqJe8-}2LzW2cu9*B(Ppu?x6!1RT_o_T5l~Neh!}Ez_;qnXSO)(3aTa
zWK_;F&r?{Xaz-XU%#JgUgNPs1lsBolkMiDIe=~p8z`)?6$j{NJ`6c9!TUkG>;!NPp
ziCU_%o%FRy0ZPK5-+u0Wb82@X`%wb}&VyAOWH5Zha*m@Z<==KIeu`dHsx}?PUFwMc
zIcqF}vKri=T~(FNOmTNL6|r8bT>hL^{zDVGf33v2OQ?l#KlVOJ%EYS%%@ahnTt_|e
zS$=!(7w{&e$JGFhPuEEwcKr2)^ATBbPp^%67-!3$FLaLJfIeKQa;U4>`WgN{GNJ*W
zhc)))8(Dv&*p7Let{ic7@o-?5vP_}mF#ywR$gW@8_}fKyQ7nqWno`8*sG)B2h=5dB8^A?jCKYh{!q9
zS+N!(C{;>sXDr{}L$m3jVSK>U=t&6fOl?3qHm-p*Iku9r;M){pMgNqaMcs&&gRvGh
z?|`UHvRRnlLS{|O`!8lNwzI}`NW9JYWU4#j5ikKVZVav1&
zcK2<6t8~^jP*`>B;z?%-(gNG^ckYHnt~CJ|{OHWgpIqtqHI*rg^*^
z+f%`u6S6>9)4F{aBvo&E4(k9n6%abCKqz3?%LA-k{AhX9DBYK-=E5ISITQX#qECZr
zz}ckk^5pCHQ6-!+A1J$`Ww_J@r_W_6IVVN3$ThbUv!?H%8?+IUB8Xkl5~nOBj`pR~
z=DTlH6dQVY2ZQD468I~~?3>TJ*p&we5GEJJ@7T#*gjvq)IDj?PP@)efd~qFbzSYl-
z)V~bP+}~MKjBMyLaGU!v>RiX{%C`HLmg0>^>Xu_eLV;$V5f&^&HYt1ejMVsvZIrmj
zs}qVLhW!ClYwMnGjCyP3(A4VfXw})molPahGY#QLQ=52vz=&O-S#ezA*6!3)t=A;g
z)h#}8Q$w|UW!&oLSDC|*TH+j
zpoJ`2UZS)o114*))T1FLEs-ggeNk-TA%BNE5!6KG6kKY5o1R3KREI~w|<^#+OVs9ya(
zseU%}5H>+N^we_w+Mb)R^;Y)DLptSe-qC1svZ?Ye+qUSJQ@T`{)l!7k
zC|{;0{Py980|It%Vsp3CX~grtu@7zuuh{C_EOeW{4+@wL%Ie84|McnZTsz9@@au-a
zN7Zrr`c}Wz%!|F(KY|ZHRP1hPYf>Fa*;=$rG(kZSZxM~SRJ!{e&{A)Pv3bUiNd>^Z
zuTM(P*~Od2-94r`-%+m(Y=}%++%DqlS-uREf4h5LmdZx7`tj?5HH}r)%vuMq);`8)
ztbfJdkmRnkyiC
zcT2{(vn`2d9xZpcc1Tj7p|3#^eol^b%7YH)9+7Qz(re#g^}zRxIpjOeEAJ)y>FS*bi(R=|Q%SexEigL)IsUOSvSjx|
z=sCi@E)5d*mfI0{#l!uLxO$q#(xQ{7B}BA$Qn<2P2fZv(*%gJJB)Fo|1~=zAPx!1j
zVZzvXxYSX+pm-z-U-I66JxU0P{H7Qd@rGVg`qoFXyWwBun`cc(3q>=(veP3AT_BBa
z)12Fv8~Toa-UdkQKQ+P23;NZb1qtizhK8lK56#8Tp{+orrt_-nHz1U)paX1($iSZr
z*e?is_b5CMWf0S6?z+w+b1&CQi7~e{uC3}y{sG_il}PO!!gOS3zb7fE`mW+JBD0=o
zk`!{8Y4h^~j`I_;E^kgq*#7m{d4=Dk?D1Ut2dl$jEPo{kd-#ShZ#0gl4yMjh9eSq2
z*v}_NxUTV)$0zK#mwC|D<-=-!Jl7Ci%a!04`{lmYz;+c*L(X@UmI;wx=+}CIAlA7+
zXejcZ`x85;Jl-snf8%pbS~`}i;Iu6LH8qj}MLG<^LouZY53Y|I
z$!Emyc?gA3AcUp*snBUG@+vhNR0^TRW
z*=>I~@l7}CYfv6>2mpvQ{HoCm|gL
zPWDEW^%f3S{A6K_Tc@1f64v?h2!!gxy6K|Qwo1C<7Q&WRqsGyvT;u$ygR$84$s-1y
zQpXfFiLqB0b+J+kDDIx0X8h`U(EE%)Coy)@X{1cd0{dH_l%+0)b&^(GjOCkbO*zsE2RW`*9?zz2Lt
z>z2kr&bf05@EF^5xH}mP2eb>i54n_ZEdIP0{mPSfU8K3nxQCcHaTqf)2(HP*y(JXyb
zf8M5cFiOm@96rPJ0T{)+HQvNR6BAOiy5bp*b1to0e*K)5>5YX{T)}gk1a&D|@l!5eSK6|j(
z@~4AQq+_oe8aPg=9`tEQ%wqSY-I`B}7`-K$CI^<^ZlVd?lq+ay;XquAS&g@C4xggP
zqvl^ukBE#2so`cwAom+PP9j+XOE?Rs{3ssyl}=ld1JvTxXr*<{564L~fysfb-otoM
zf?pr2F``$4a?_{$=#sY~y=d_Wp-4Y&%d^;ic1S`6cnNeAb;7(T>w){n+dfFi85`}a
zZ@C)3aBtQwFsj(_O^`Bf=}Rjgzls|yeDAkXJNcU2!`~Ch0}IMBBoA+YG<
zjN0qF)E3-DPuSe$!nAs+3D3VJI{4QHn3<4~RomKbNmvV`HM#KI3YslH#a8&qP)O>ZajXt#bblqf*
zRhcPvXjQu^;sk6T&JnYvpN~E6%b_>vXjWg>EuRWCD@V4^d;0g)Gr}kNo5CPsq|4O2
zZA2+5U~|4pj3j~S1fC(Ud{V_L0BBabbzJ^mbC(ntjxRX46RwUJf|a}nF#IS_5X<9^7
z`k_{ngX#In@P#T6LQC1>!>V%faOGi{TL)g?01B?v(patAj6BWa9bv)53tFZ&w=dP2SNNiib*b
z7Gj=PKX8Ve!Z}naPsr!?E;kXohD=Y{7C?d7DCKrm=aOf0f{m`PQYD3}uJdu;?W+!4
zFmgU4%U~b#b9|btUjjjAZdb5nQD+uT8QCX^Vb
z;+ray(BseY-j^Ot`haNzumJWN{h3Ib-rytqgEsgXjWx*eq-JR-4%21LvNVJf)lQU)
zza%Sf=us5FwSBvma;e#Fxx7=#9YK;?OFaQI3Xhw?XeavFIb_XzuGL^UGw5QCL?X?!
zH5#)c4Dac@-4xg9;vB)eF01KoPhW2lKe@?R{*-fUa@@MnV;7#ic}c(`SFU?j*BRQv
zz*lbcLK{lIh{SuTJw5gOYkE78GGZ6%M~-K=TkHQF1PC4>WoX1{uiR=mK`SUq3iIAn
zQ=lb8pH{QB{Sw@y-cHcx=O-C|6UbK?Z)xi>Z>fX5Q{2Y%i{kSgi4N8yXZEkW&?%o9
zZY@{`W22DoTfN54#mkM!Yv>G!rH*cATX|I#5=QThN8T!`l8*r7frQbKE}zeo4#_m+
zR>r)Nbjf;wB2A*1{ig+@pnC~V}dFz;sJ2Osuu+tUR;gN}$jHDX+`(=}_Av={Q
zrI@&xCl#A}F7wR9`bEiH$~EsEi```d5p_wo?{AE_$2HwpIqp!#+u|+j>yFEx3CYu5
ziy|p@^yfNbSR~;2Cv01`S_aQB0eEcjy8r*c1y3z>6m`jy^BZ-2Nh@pv4o)Xf&6P#^
zR7uhs>%#+^uGh0(1!t-3HbJ1@0eLmXWjyx`O8QtK6fb}}L$L44UemUu5cee9Z66;b!m7M;s
z3snPjl#Mn4Yoauw;)!Yf_&52yv|1Np0wj1;V2sWw*-6p#0g+ID>Q^-AOY~_%IW0>L
zCJ$-_B*q05q}g3xv;(K;GtvvGU*@lZ{%)`s91>VCWwNGLIVc`%^H(Y3#E0vGj}w4E
zaBv>1iMTh*0~y;?W1EwMddEje_`A!d|G_{9pO%S|aV>a|C$wQHcoq@jl3H&V`-qg~
zdpu+aKYf;`{h&hvr`Xw>aph=ogqXJ);e7NfciwfG<|+&(PVkA#*{(Vb2esoU^|9U8
zeXqwFW{aaV8zAM15gtgaLqImul6e&(Q`|3=7{*Z-S6O1y78=27`Srypn~h7!E||MM
z$!=KX)-dk6w|?WX{l~_}JIvx+x+3(BYf@qVx*H4oMmQ8H*@g
zfv`VmM(ZEbOkRn(>&L%TUQm09u_<}pj}Eo8Loh{Gis#zozr6(6DoJae)53l3+Hy?s
zkugqQCebvp)o&paP-u5(e%R@XY30r49B;m(nlsnieE)QOqlr1I}Y@fR}mLp*uWi
z@Z8GHH%|t5M|{FfddxM4wSHAfa-;gL
ziGW0QlT#Buu*hg<8FJKJTmhk8Q0F;Vr+?)R0}KjNG!BUdq4JanXt8ciKD
zh#P*lOta0RDS
zBU`0p-JbROlLOb2K?aDgc8+C1N*W@uhLhNGGqnW&Fdy4bkk?xb2o~iMTCN})p`xy{
z7cNP!pO?jSwJ%xy-s%w|u*MgjiBkjvi=a+knJ2&S@2%%BQ522af_K}z+5XD~a7flb
zmqEk^zdflPgk0Skz)|H=;!Yz+bLYJ>#k1-h2yX#qk;q)9*ip>BlGN0;-ksA?TZ-v>
zs*h!c2e<=cL#L&SvVEz~>WEt_$Gj2}N(<^N1xhITzxpA`k?PZG1s;_V64S&C+b;U#
zk*}*vbHDS+HD8$D&zCn^0VDcJXUgf|E@{QfS@>y@V}Y>X4t`y7*k*SZ}DSt%>rND%6JJ(!MX1
zRknd!oUJ$;1uS+GnhH{Xk|v#FT(D*^@@?vlM{e8Idlq+^yJuE|e3KhA%1@HZ&Xac%
z5lI_{^DbEJx3D8i;*?8_d&C|WkH}eqU4C#@>)0T&E4ZNiP{;?Tu?g7V&i!j5s9ldu
zw=snUHe`@dJ+ZTq4
zt-?W%SnBgdI^IswrzY#Ot_mIltMI6xl?=cN7oTmfl0c$N*plAjMh
zdhYv|&G?(hPkqB2x!%KY-r%g++1ZaJQ$R`?^|7Igfk!u<7Hb%(m6m*;L)zD8$lO3m
zPVi*8IvVih+wBe`6}VQaEK9gc2`5fyy5zcQyIRe$W2OmHiv{Ip
z-UA>mWbE-(BHH%I9oTM;?hkg)Q^-{N0fB8o?qEFlN%RRqZn2_HgS=DpcK5lrhXjEV
zO(gFVf_F>!ObDyR`0h(^AXHeyBw`TFF6G7g0TA}pJ5HPzaTw@#OxG*6MK!0Y)deAc
zz)Icd*meH3*%j9Y>ZF^7%~Q0yu;!NB)2L7f97w9+jQ<4I<=QB*4ye;!x;mit5y3v~G
z2jOaFL&E0~#e;(>W+6_zDU2i~0U0lhL;jqbyEu4qz$^L39JWst@$;N4hBh+D=2`jD
zudT9(`=ij2r{C;bridr!A5M?uS5R9bT);5$xjs~*AGc%_dOB9jL07fH?;P<<%&1(r
zx9JgIJZXI?eR@PDx=83EHY<&2k_FP5IxsTuRTdC0;von!h-^KX*SP)jcEF+_8|4T0^L|DXrzuxd-+&h&A
z4{}!R@6m&RXq$88JqR3b@|-|MT0W;9E*qaY_cA{fLy~qbV;5_0f8ms7m7S+X4?I
zmgwSi>4*>{0DC7GSEq6i6u4E<{t93Th7*>E`OFRYX{Pdokx~xFv5MO;KZ4HALK6gF
z`DnI0U}(m`EWcHza%fD;ERE=sYi@0iG~{uwlYyq;pi0YVt~@`}Bei*l(Bh?%T9VjTfF?mXMTO
z_KYzN&g8szI`6FVN_Sd%sqOHit%0QRDo+uS)cu_$9uZ_SbwKn^Q6iYwxpIwI=hJxN
z#*d+OdO*&nVIZb}RJ(`fd5!$%(-Jc|`)1QC)r|29iG*FiS`K)Bt|{~fqp3gp@$`|0
zSLq%M*W*NC+iVu&G_~Hengl1#@(JOgc6NEUK}V_C{(ZPo?iNW4&kS~FD6q;GP^6)I
zS6uukLBy-B)L~M7ZAz?P&1u927JwH^5JcuDO#d(h^LC$D)2>WV1UH?5)```*gea
zZY~Lc!u3nXV!zh
zU}Y*Q!21Mhq$4;u>t!9y*2wD2z`dsAZ6y!|MP?R@tyE`odG;@Cgb5;pv%q{5kP+0v
zcnsj{$5GOiKH&1Sd)u!Dzx=S??`-79+~&bS08xneG`V&si~}hM?cM$r6GgSCIlh-|
zx>~e^8N8A#t>n+gv=>5Nt=0qr1-=F-_gd8xYgq#KEZz+uKvL_s{>FCzCo)j<{7pW-
z_kcX|IQt=>N?ezb*}eQ`KS|Z*@=q@lyZ(pSCqXHB*5XidhNhx+LfvCvfN3Md2KWnH
z*Us-rkMC^GeQ$!uSvDepvyKpi(;i48(khqHacLs-A8aztD9@fo96UMSS4~BC^pLO9H@QWpS=iw$`ZHbL7;Drc%L3AZJ4Q+
z2vVVeQ^T{%|CIRvC-dg`E
z6Rr&XO~yMHy2m8%nC-JB2|(!0+>!fnS~D8FgL$*v2CbMB+`4aj#k%WYf=w$BPk!A>
zdl~ac&vxB481O&vL5k<(PDNorVmapt*OYWo_9Mm2-qZ8T5VviZ^i%jbc=LLxb
z4V17>rz~#y&7{Z4Y@KgnsaPZjTAl^Itl3Ynt0md*k=x3@<%
ze^;0zJAf2r-F_1Y*vc(IXLqh8Sc6QY4o#R(@%la+P<3Z3EEX0yJKa3$ogBXF9c7+Plr)1elAkfX#(6yA(g9vv`PWS
zO!hrp5o{ucA0(XlgJ~ddt!der9-Oqyu$Ze3sBll~Z?v~CVY1EsDr{JAIlFLq<+
zE?&5BlshdgdSyKM2n)=?$k33hn)J?_BAN1A;qhHCkY&dwIyw2hIi}EfJQqNvuipZ=
z-4aIWFYxn9kah|cbsFiv(9M364M@y@+0C~r!QQ&t_idO6EV3AA_U@+XzC9QPiClhC
z+pA|ff9b(mKYQ$nGtr?-jzqn?4q5}g9;L{^wj-)!o1~!h1_)t!_A@OrJ($HsTL#EX
z?MG2peK2v1akjx(gGcOYAIiBAFZCp71T*?2e!J?Oi>qFp@cY%dv&kJ26tjp5`0W1g3e^daNwOio0BMMg{&PQtxYp!H`+@%)S+Dfa6?`zu*@5nzz3PkK
z)51K@4A15pREj%pXZ8*inttwzMNCo-1kvvXC@prfFGa6e3B2pme{!(e129ptVGRh;
z3Tbf`I@ueczHeB=35KJ-q+~D4?k|aV6-DAq_ir$Z_`dr!Ki>v=uI;bqiQi7XTjJr&
z>PosrUJX#lxH=lumB22GA)!I%?Olw9yp&~JY1b6|!2EgK%;!Qu7=%3IgJN3QYXjxy
zw+@H14Nj*|?8`LtNzJu5HiT=hb*;w#9E_yiDnVJqeUF195S0|p$m7H6`D<^msb;)|
z9b>XJFj7V3BBo!CG~ryx7tBZ&`odnfC0yboih>cns3*%z7NaVq&VxVW!0#tp%ht4XO(72WSxnb!eTIK>A&oaLaN1OWi(L2}?n5(nZ(1=7^1XCuSRNzi!MLZYTnT<1TXVq|T9&~}KeO`&H
zq;wDHDIJil{%^*vrNF@g@=u}9%7|wpAo+=RH2-V)^inPi1r>J*9nQ@sj}qQ2A-_
z>AUDn>5v*AIm_q**Q%TfP}GXBj%aJjn}28mppCppptgQ?e!dzza^d#ono8V#aGAcm
z!)j$vWQep@M1xy*3V14#8fbt)3aU~nPox^e8g<{TWrH+asK}Np=UCejoh-9xLarlX
zLC#~u-H!F=7U(hWe->xD`4+_0F>vfL0ob$$&0Qj0;K7p
z?=FQ;!MDt)@_g*#34P>k}<_`
zeTbaJkkmt4XjJaRfaspxdL|CWKJbO72n{-!hJ$f0<<2P{V|HUbGwnx6lQ+;c8v>iT
zyMSnLqWv*WbYjA5o*Lw*-{0{fZ
z|FRMV^%1h5Q^frsAtyY>$MRJogfv=vNlz6X#(h1{|kf1D6DI!#g|5oN{98+ox
z$AVNHyPo?~?nG0~dhAmq$SA`4+&%Y{!Y_VsD|8WQR2C^4YEhEYU0BY&$A(bLz%iMS
z03FeCdEwz43wOU1s+O-bpXgcq!y2l&j9tCZ*Hs`9r$5iJdn9X$!+Mbvj7H!OVREZz
zw%m-@Kto_;bd``J#*|NYv5~3ISQ9yQZ~)->0W9d%)>afZL{<(mBom=vbX7XsiI()d;
z;AGwIkLQtFX({}Srmi_Y<0+lqa6Tey?Cz8Yl{UdladmU+Xlg*8tP?);4v<+duO&})
zXCTUsBM7dm6NvT*C%>Dj9FgNxMh1M5=yCCQZ4qUy36hYYUvM478!^S`!2rO(1|w2f
zv2xA(X5IU>T2^v9zv`mnQUsvD6ln%9S_SIXf(+{GB6)|t?7OJ5d6X5)BX;9galx->
zSL*dph`EF}p)mM!KOe2Rt?p76vL~~=f3|;_<;#S?r7Bz@y88@|UZG&gv!#4VLn3pD
z^|9S|arjW4dPG^&)LNq4(R_#^ykgy2YVFGr^~oOjyprj?nLx7oO{F-khQIP4Q)4
zE<7ut_20Uk*Lm2FN{INPU}u-}n`{nsF1&ET-wJkv-9LStZ!FykK<`yN%6oxOPdzX_
ziQEP_ym25w$G-E~e({#;jZKFj!zzVmWGW|ifPKHOh_rj?$S0>!&&*|`GDo43l7;O&
z^6l)fXkE@>k`BcgK_Sfnq^h3@~
z7w6Hz0x6Z;@SwvkQ=F{0pI5kFVhko`1o|!j`Wy`L
z?Wk>mTNLQ2;YD1eL%fqm#z6VuqwbWw$>cYsctk;BT(tXWpnDl7?vi4_SbE7lyD5E>
z0_LB!m%=c0@Rm#vk3j#S-!&{RB}J$`+^Axcv0CqwIX&4|6YweBBGcvvV6n?dj&5}p
z6s^m>LIbLx8L2nc_@d{zB*-+UJH?vttgz03M!W=(V%TMYt|Us^jaOlq`=F+E#&caM
z4xO~go#x5A!IKF72`ocwq7Jfv+#vxGF0oj>#UJZ6;bXK?IlpQ>d8UqdT2CB`LZWAC
z+KCHWoa}84AEd^1575=fXQ!E5g7wj5*n<@trZJ19-hIm3SV4)1w%zL3r@D+ox8SM%A{$q5yHgZ>==9
z@KEjRC;*1RXX+<$eP-GYDW#^POFP<6*Wo>SZVoqnfi{Tl0?(M3$KQ<*zbPyd`q8or
zb6`R7=OIN7A?wsS6Ynmv4aaB!K8Fe&v30&c>>jfFRx(l1$4Hur)C~UF)Y;
z;n^<{_-kHIg7o#g*i24rpQe$Y6#tTVQfE|Svdx0f0bp6be`?VZzfK&fH@RVxjEJl(
z{}jN(JH{U+x0}6Fgs9+nbWd2M3Ny15b=$et>cxpT&n5Tl%E?^l>3%m2r?%@UNc)M$
z#nv76J!C7Y!9ge;>j!jovdr-6(}k#e`<~;3UW`Mzf~md>*58MM!G6L{hqd+R^kwwy
zlM@NeKP4IMYd~1^yfD4`FFfv={#3X?-B}7I9Rqw*Tie?ia2cOU)kL2g>guxNbV}r@
zkt^^T^;VHbIT&{<4ErQn(A8tsIv<-9Oz
zLwB6y`$bEhNIOO}oewCt3J2ZG&ZB~g@3lEDQ-u^<`3ztZbJr>ue$A!%!M3q3tqu+0
zB}_MO%(ij+Pqsz42ta-`hG+n?pH0xQ;|#zHfjdaWb2U~iD8BLBUo
z?60#4QVx+j>oM}bdR=kBJzC*3ZlXzq@6dx=ll~=jy*zSwr<_UbzWXFd9^?2;8bhI&
zb{1dc8VBV0I1!9K`2>t^8D7m@T5N^J
zHcf^5EdwE^pl@@uxx&~c!_Kf?$x3;ue0kyGm;@ejh2Xr~y#z{1$Z!ZJfME1~ipSms
z%}u-mvgrk^`6yqowEg$VlR{zKagpY}do>F`!+A>YRXA-MpleSP*Gn~1IPV7B|LA=G
z!~h3?8#uv+dcj#hlkDMH%G{TwLfNIW99%x=c(!2Wf0~haawW>}IGx1ex-1k5b#x;;
z;XtG?%mxAsP(`CI5dClVSAL*o*^>nv%3$23M+;eYJ@!g)hvNFwU%|ga$yET4&uFeq
z1_#srE>iLH73aZ@Sm{GHUl4Pb#>y_F245o4)mywBtxJOvrGb@22a%KR-S+VOSY(DF
zT!8YmpnxFxbW5lu9#^h4zb?&>6HBL{N{ogZ2Sz*kx;%_4)D8V=i4m^P^Z4^eEt;fke`L;XA!4rl~VZ-8bXhXke2Xn0FR
zx=z}prP73B!Ry|HyrBkuvc56DRl4vso7_lJ+%bV&Ho9~qNVut@scWFGn^iDQbjU+Z
zgV?}!swSyLGNNU0{YLNR(Y$tF*?2*u@podee0oFA=)H(@h`u+7r!sYw<1^h)t(GfJ
zFmV0#;@eaHetl3a;*U2@{;QFCKlK^xGrwVJcH>2$k;ZR76Vg@U5fG3NNfka}pMUqa
z%xpTiWk2w?*Cd&Sf#
zMboE{}7T~R1zi`t*ZB7A_!d?@}uDz`{{NRhG$Q>o+)2#MC
zSS}UzBsNRSw6QwZz<=b5+}&Dndk&O0vs9Z~dg%e*P%vlc5EY>(p{S(w*>C}QiIRjx
z#e2#4T);K7p6dxo@sC~uhq$_P1yHcHY&7H0%+82spCAzTLHgvkdCv9P?YvCH>lYyl
z?qyNH%&$k*te-I4l;(@dF1b^?{d3bg^I`H!B|stx1*8Y!W?K>wNtv|TZy@doIgI#4#i2cK7ERb_dZL)7bwBX9Ulq)o@}Q#4}~WxT|p%}i4M@w~8_
z>_(h4I4|K>9wZwOK;O63$hpou(n>l!s`I=ZA)&c?M%$$B|3D1(x)-j;vsr6-}y5=#2S_oKe&M@PDM~^X&*R`%h13g6(
z9@^z6vTV!R`kX^hcwHt52V=`)hI}Q)$L065m
zkB~N01rm_?Et$xyq$k!ZrwE)E$UYG;5vsg4P(GD(c8fNymb?`!RbrjMQAl&}Ih7ma
z8(U;=(!+AMUR$)LNA2!7$QQoPM9Fcb@WU(%^9U#~O_mrZ(j{$FcCZ#KH32qrXmzFr
z6(d7BAxu~GC1;I14N^=a
%>L(d4Wkvq2^4Z
z@JAd+A1Qu0X}!2?rf2M#(Y}u6_7WB4vRAilBgih$t<&6(uF48&SzkDBY!mf^-N-cPrf?
zA`Q|Fq97g8A@CcU^nKoQp6guS`~A0cvDcbw&N0UvdEdu?Un$y(qVJE!%<`8GE@CV;
zy7T~dAEenBV!3vWZ2z{SJPLHv`H4|FpBe?PZLjp5ttF;s?c=utF5j4%`c4L5!v)6M
zT!at9*e6bO$)QtgiPFl*?6Ka+*fJ33)?Wz4PeTx&s=0(_tC3uqyHtU_a|N0a&G3CR
zJdd;F;Gyc%P*L4Q#Xg{0L-pAyPncG5NX@=$i5Y>@!xpOMQe0sRf}su@-{q
z<(5tu(B^c1SuppHT3QEeM>u^Xp(#ACoEoURXdor9I4{a`z$%&`WNda$5J}1&Nj)AXtO%eFnBso9#@8Yr
zdwj$}Xc9tJ=2UkMGU$%s)IfsQ(xfT--M=>YvCs%ec=nSnI<2v<`@zLtR}Xd}SgzK*
z2pq+;Wjm_7*17T6O-7AglWK)WQD=o7h}*ubYHjB6`#rfHogrj5-69a1oh|)yz%poQ
zc7Xa>*8Ze$Gn|D2inBVtEDai;OVfwBQs@UTD)}T0197h05Fy#Hr
zy`-Zk38MtU(9>pneatar?B`EmaP=4^oUZSIkVu%qXi}PZ9x0)Gf>AYwvS+F(1xBO7
zSqiq@{YA~EQEi^WZJLemSm++T_0jA@;gK*m7Me(RsC&G9^Oj+^t>yDwgnj=G=+-(E
zYiE5BgP$Rr0nt{JLYKk7$Y(-v-_k8HpWYl~wR-(TL7nu%jhi
zBhZBQdyzpfn0N~C3GzlX^-9yr&={US%fKM->`9*xVyl9wb$Uo?`
zU0VmvbaW-awhBg}hM*~-1aue991{n;vpAus>@oB;EZxr;nB
z{WnmlhF7j&>fWziBmVimf{!T=_myn1zXAV5
z#Ggv(cj;@-Zl6R4knv+iFb@B`h*swLw)9DYi
zoKqBz0E|jYLvuVeO)QL~wZM4nLRwhI{uevQIr_Fr(hdT7OUil39y#vdv?u#8bTG18
z6sA~s`3Icl(%9Qjhe5*K^=7;q$-m51+jP|;1V>+#^vKlrm46JnD-(=r1kOb^15^W;
z@b@!P%zHx7Diyg`KN+hvng}RvpG6TE2_#&NBXJhuul@Z4H5k!M|ArKTk)OQB{uN}*
ziQCR59d?I#_n-q7hu^19pZ>ZBJyOr0{HIxuNOun|3z`@F_bS!Wo#?ZKFi#0;I8IOB
zNb-pYs*{;lF(U}mUGx8={^Dpelwe5?3v^?Ow-ud~8r8!@q=oLdz&)-U*E(EI7B<+7vC0$%S@Lj(4QjTk}a=$aradZRI?s>%IU*~^gJ+Y-ItPLFTI2oo1&vW#$j
zord|Z@ca6nGk^D`lu%X$Yb?3~FV4(6HG1^odHQ+qV`@WKkmyrMa7LI+cekusDeOXg
z!ROs)fE|NMY~C+JFNMuZd?hmLeP?1g!$Y%;vY$5xrLI!NX%M$;OP@Jhygf(^_-75X
zV@SuMrpN0uU%U8`$8%CsxgLc+{xFTlN~Hn9Z~_oApV1A2;d8jV
z|M}`M9HFfM5q9v-R>jA_{ks5UDp-@<**a1>Ezu7YyMpWf(BS|0)Kd&_l>hS8C^>qw
z(3KAWuC)$r^%xo`d{dN^(bn-_IeOtgUkS3nn
zwCTM1znQsHYhG|bTA`Z#_r3rQZU3O4`tMclbnO}95j?&D0Rg$D@qeqV4t@h_VE5Ob
zQ52Y5rV4CFPYb|BIq8`I4TvCKSX_()MmQP8Yph3PVgER94hWP3COX;~Gkx?Pbu3&w
zJPj}}9h=($WtPKi+KuG!We3CfyLG*fqQMkqFFj)M@&1jD>PXfbC}ffN3WcP??&s)z
z%f9&^kpdp9{{LPKM@sPCLShN{J|5lsjfX0T1|aRz@{jfpy4A8Lo&WrON5e(901sv3
zSC@p|gD*u~JAQDGCF&-Kg~dXJRj`EV1Ek%ZL|W(%&S1hxBq6RdK5~b6(ZDu3yy;|I$FQKx%$TE@Ga>iSLvI>d)~$9pxguC5Wg)^4vAhNz|Qo`EB8Ob
z1N|@ZMg*Q`2>RRmr;irl>UntpFhcDvM~2HrHCHm{N;mmSua+J`*8exrLqn|s0BG*d
zEv$lk=CjxSfq};+&AQcHg}}i80upK5e7$Nt{OzvYhLL`
z5d5p@Pe)CEjz-e?gB|T%fnK(=U6iU^z(^j+6gKIRfHJ)gXY?Phz5zpT5-)G*5jC*)
zD|FuNwU|_uB3222#+wv^YIMI3u|&Jg5&t~zyI=3v4kTwGiZc&%jwYY$ILO2Uuu(3p
zp)gS6fcoHaX|2G)bpF>_BYzJXcLN3ey!Qc!UYt~=JTXvg7|XgnKwY5IN;xwrwo#A0IbQ~Sw-FQ`E{
zLU-ar*zf2pP+eLC)u9v^REdm-qyI0f@e!0HiIRBdkzyX1e(%*mi$Q+3}rB^wy%4h)IwIg~C(8vM3O9&TMEaia+{g*{J9{T}A
zgvfyF&C!`Fp%57vi2$-~7#bk{px_+;FGFjM6VYf6=XOLlo#~YU(uRQl%g`Q078fg@
zV^CEd=3UuytRnx-NdPSFsOTix)s!RY5Io_C;v9c-mJZ#y7C5N@SoNB~FwK8ZvH$(t
z>5zlHKtSSi{b&(>9}37{)jXTi6t0)8Xt@5(3Xq?ofNgc#=^4w>U&}CjfF#*EM87ex
zdhB<$;-G$6M(vjHKkBqUZXiP7Q$_t;Vo8pkcXg^OBM6|GuxU`0ssI81@`$MNzv;Ip
zl<=MW-l<1_9XW|Yc8*O=CjR4|cB7lM@1$NvBAAqShtkr~+4L#`1B{XuYjA7F&9
z19J+Fgvmtf1E6Ff*u`-E`&mEwp5vbdVM}?CD;)hbqT|pqC5kqm8-&tXl^m?6Q-A#E
zXtDm|Di%c&&aC?|`5=^g{8?ssQy8a4)tj?uPQYw80dC(lKMC`Hv)iBCggN^en|fp^
zMqbB6F{l7M)(O4B{rF8X4Q}5q$geXK6p2v0p}VFTnFn^Ke82K
z(GjSgzN}M;^#4ZbzBmU;SL?Ct5gxhs1;^#7u6kHYjEkI*Ssy9KIQg*QOAo3q;18+#
z`xPO+3g5~{5~6!(OitXCkdPpM@Zf=??xbD<#}qBzzj@B_&tkBi(>D|d5C0l*78gAH
zhi}~7F{sA-3$ogiq!MsNKF1%OvIlbYzYhgLUerO=72kdsAFUB|bjCX`n`jQ2h>C%M
zfr38qh#>m!zis#Pf1wym85BDIa71sRg-u99F=Ak9>WzuE1o2O~%`X2ul%qwhA%Z~#
zx3;zu9t>FU=|Nbp9mP7?T4KG*oPJy8D7bR;