From 4dd69d79c296e8a51e9cc6a930d4bfd542aad483 Mon Sep 17 00:00:00 2001
From: zhanghy-sketchzh <1750410339@qq.com>
Date: Sun, 11 Jun 2023 18:53:44 +0800
Subject: [PATCH 01/14] docs_translation
---
docs/localse/zh_CN/LC_MESSAGES/ecosystem.po | 25 ++
.../LC_MESSAGES/getting_started/concepts.po | 25 ++
.../getting_started/getting_started.po | 156 ++++++++++
.../LC_MESSAGES/getting_started/tutorials.po | 33 +++
docs/localse/zh_CN/LC_MESSAGES/index.po | 272 ++++++++++++++++++
.../zh_CN/LC_MESSAGES/modules/connections.po | 34 +++
.../zh_CN/LC_MESSAGES/modules/index.po | 38 +++
.../zh_CN/LC_MESSAGES/modules/knownledge.po | 89 ++++++
.../localse/zh_CN/LC_MESSAGES/modules/llms.po | 56 ++++
.../zh_CN/LC_MESSAGES/modules/plugins.po | 37 +++
.../zh_CN/LC_MESSAGES/modules/prompts.po | 37 +++
.../zh_CN/LC_MESSAGES/modules/server.po | 32 +++
docs/localse/zh_CN/LC_MESSAGES/reference.po | 25 ++
.../zh_CN/LC_MESSAGES/use_cases/chatbots.po | 25 ++
.../use_cases/interacting_with_api.po | 25 ++
.../use_cases/knownledge_based_qa.po | 57 ++++
.../use_cases/query_database_data.po | 25 ++
.../use_cases/sql_generation_and_diagnosis.po | 26 ++
.../use_cases/tool_use_with_plugin.po | 25 ++
19 files changed, 1042 insertions(+)
create mode 100644 docs/localse/zh_CN/LC_MESSAGES/ecosystem.po
create mode 100644 docs/localse/zh_CN/LC_MESSAGES/getting_started/concepts.po
create mode 100644 docs/localse/zh_CN/LC_MESSAGES/getting_started/getting_started.po
create mode 100644 docs/localse/zh_CN/LC_MESSAGES/getting_started/tutorials.po
create mode 100644 docs/localse/zh_CN/LC_MESSAGES/index.po
create mode 100644 docs/localse/zh_CN/LC_MESSAGES/modules/connections.po
create mode 100644 docs/localse/zh_CN/LC_MESSAGES/modules/index.po
create mode 100644 docs/localse/zh_CN/LC_MESSAGES/modules/knownledge.po
create mode 100644 docs/localse/zh_CN/LC_MESSAGES/modules/llms.po
create mode 100644 docs/localse/zh_CN/LC_MESSAGES/modules/plugins.po
create mode 100644 docs/localse/zh_CN/LC_MESSAGES/modules/prompts.po
create mode 100644 docs/localse/zh_CN/LC_MESSAGES/modules/server.po
create mode 100644 docs/localse/zh_CN/LC_MESSAGES/reference.po
create mode 100644 docs/localse/zh_CN/LC_MESSAGES/use_cases/chatbots.po
create mode 100644 docs/localse/zh_CN/LC_MESSAGES/use_cases/interacting_with_api.po
create mode 100644 docs/localse/zh_CN/LC_MESSAGES/use_cases/knownledge_based_qa.po
create mode 100644 docs/localse/zh_CN/LC_MESSAGES/use_cases/query_database_data.po
create mode 100644 docs/localse/zh_CN/LC_MESSAGES/use_cases/sql_generation_and_diagnosis.po
create mode 100644 docs/localse/zh_CN/LC_MESSAGES/use_cases/tool_use_with_plugin.po
diff --git a/docs/localse/zh_CN/LC_MESSAGES/ecosystem.po b/docs/localse/zh_CN/LC_MESSAGES/ecosystem.po
new file mode 100644
index 000000000..4292ae853
--- /dev/null
+++ b/docs/localse/zh_CN/LC_MESSAGES/ecosystem.po
@@ -0,0 +1,25 @@
+# SOME DESCRIPTIVE TITLE.
+# Copyright (C) 2023, csunny
+# This file is distributed under the same license as the DB-GPT package.
+# FIRST AUTHOR , 2023.
+#
+#, fuzzy
+msgid ""
+msgstr ""
+"Project-Id-Version: DB-GPT 0.1.0\n"
+"Report-Msgid-Bugs-To: \n"
+"POT-Creation-Date: 2023-06-11 14:10+0800\n"
+"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
+"Last-Translator: FULL NAME \n"
+"Language: zh_CN\n"
+"Language-Team: zh_CN \n"
+"Plural-Forms: nplurals=1; plural=0;\n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=utf-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+"Generated-By: Babel 2.11.0\n"
+
+#: ../../ecosystem.md:1 2a67e31428d84197939447c3decf9768
+msgid "Ecosystem"
+msgstr "环境系统"
+
diff --git a/docs/localse/zh_CN/LC_MESSAGES/getting_started/concepts.po b/docs/localse/zh_CN/LC_MESSAGES/getting_started/concepts.po
new file mode 100644
index 000000000..6d3762944
--- /dev/null
+++ b/docs/localse/zh_CN/LC_MESSAGES/getting_started/concepts.po
@@ -0,0 +1,25 @@
+# SOME DESCRIPTIVE TITLE.
+# Copyright (C) 2023, csunny
+# This file is distributed under the same license as the DB-GPT package.
+# FIRST AUTHOR , 2023.
+#
+#, fuzzy
+msgid ""
+msgstr ""
+"Project-Id-Version: DB-GPT 0.1.0\n"
+"Report-Msgid-Bugs-To: \n"
+"POT-Creation-Date: 2023-06-11 14:10+0800\n"
+"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
+"Last-Translator: FULL NAME \n"
+"Language: zh_CN\n"
+"Language-Team: zh_CN \n"
+"Plural-Forms: nplurals=1; plural=0;\n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=utf-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+"Generated-By: Babel 2.11.0\n"
+
+#: ../../getting_started/concepts.md:1 bbfc919428fd48f886677ada33b9c495
+msgid "Concepts"
+msgstr "概念"
+
diff --git a/docs/localse/zh_CN/LC_MESSAGES/getting_started/getting_started.po b/docs/localse/zh_CN/LC_MESSAGES/getting_started/getting_started.po
new file mode 100644
index 000000000..557c0b275
--- /dev/null
+++ b/docs/localse/zh_CN/LC_MESSAGES/getting_started/getting_started.po
@@ -0,0 +1,156 @@
+# SOME DESCRIPTIVE TITLE.
+# Copyright (C) 2023, csunny
+# This file is distributed under the same license as the DB-GPT package.
+# FIRST AUTHOR , 2023.
+#
+#, fuzzy
+msgid ""
+msgstr ""
+"Project-Id-Version: DB-GPT 0.1.0\n"
+"Report-Msgid-Bugs-To: \n"
+"POT-Creation-Date: 2023-06-11 14:10+0800\n"
+"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
+"Last-Translator: FULL NAME \n"
+"Language: zh_CN\n"
+"Language-Team: zh_CN \n"
+"Plural-Forms: nplurals=1; plural=0;\n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=utf-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+"Generated-By: Babel 2.11.0\n"
+
+#: ../../getting_started/getting_started.md:1 cf1947dea9a843dd8b6fff68642f29b1
+msgid "Quickstart Guide"
+msgstr "使用指南"
+
+#: ../../getting_started/getting_started.md:3 4184879bf5b34521a95e497f4747241a
+msgid ""
+"This tutorial gives you a quick walkthrough about use DB-GPT with you "
+"environment and data."
+msgstr "本教程为您提供了关于如何使用DB-GPT的使用指南。"
+
+#: ../../getting_started/getting_started.md:5 7431b72cc1504b8bbcafb7512a6b6c92
+msgid "Installation"
+msgstr "安装"
+
+#: ../../getting_started/getting_started.md:7 b8faf2ec4e034855a2674ffcade8cee2
+msgid "To get started, install DB-GPT with the following steps."
+msgstr "请按照以下步骤安装DB-GPT"
+
+#: ../../getting_started/getting_started.md:9 ae0f536a064647cda04ea3d253991d80
+msgid "1. Hardware Requirements"
+msgstr "1. 硬件要求"
+
+#: ../../getting_started/getting_started.md:10 8fa637100e644b478e0d6858f0a5b63d
+msgid ""
+"As our project has the ability to achieve ChatGPT performance of over "
+"85%, there are certain hardware requirements. However, overall, the "
+"project can be deployed and used on consumer-grade graphics cards. The "
+"specific hardware requirements for deployment are as follows:"
+msgstr "由于我们的项目有能力达到85%以上的ChatGPT性能,所以对硬件有一定的要求。"
+"但总体来说,我们在消费级的显卡上即可完成项目的部署使用,具体部署的硬件说明如下:"
+
+#: ../../getting_started/getting_started.md c68539579083407882fb0d28943d40db
+msgid "GPU"
+msgstr "GPU"
+
+#: ../../getting_started/getting_started.md 613fbe77d41a4a20a30c3c9a0b6ec20c
+msgid "VRAM Size"
+msgstr "显存大小"
+
+#: ../../getting_started/getting_started.md c0b7f8249d3d4c629ba5deb8188a49b4
+msgid "Performance"
+msgstr "显存大小"
+
+#: ../../getting_started/getting_started.md 5d103f7e4d1b4b6cb7358c0c717c9f73
+msgid "RTX 4090"
+msgstr "RTX 4090"
+
+#: ../../getting_started/getting_started.md 48338f6b18dc41efb3613d47b1a762a7
+#: f14d278e083440b58fc7faeed30e2879
+msgid "24 GB"
+msgstr "24 GB"
+
+#: ../../getting_started/getting_started.md dc238037ff3449cdb95cbd882d8de170
+msgid "Smooth conversation inference"
+msgstr "可以流畅的进行对话推理,无卡顿"
+
+#: ../../getting_started/getting_started.md d7f84ac79bf84cb6a453d3bfd26eb935
+msgid "RTX 3090"
+msgstr "RTX 3090"
+
+#: ../../getting_started/getting_started.md 511ee322b777476b87a3aa5624609944
+msgid "Smooth conversation inference, better than V100"
+msgstr "可以流畅进行对话推理,有卡顿感,但好于V100"
+
+#: ../../getting_started/getting_started.md 974b704e8cf84f6483774153df8a8c6c
+msgid "V100"
+msgstr "V100"
+
+#: ../../getting_started/getting_started.md 72008961ce004a0fa24b74db55fcf96e
+msgid "16 GB"
+msgstr "16 GB"
+
+#: ../../getting_started/getting_started.md 2a3b936fe04c4b7789680c26be7f4869
+msgid "Conversation inference possible, noticeable stutter"
+msgstr "可以进行对话推理,有明显卡顿"
+
+#: ../../getting_started/getting_started.md:18 fb1dbccb8f804384ade8e171aa40f99c
+msgid "2. Install"
+msgstr "2. 安装"
+
+#: ../../getting_started/getting_started.md:20 695fdb8858c6488e9a0872d68fb387e5
+msgid ""
+"This project relies on a local MySQL database service, which you need to "
+"install locally. We recommend using Docker for installation."
+msgstr "本项目依赖一个本地的 MySQL 数据库服务,你需要本地安装,推荐直接使用 Docker 安装。"
+
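For readers following along, a minimal sketch of the Docker step might look like the following; the container name, image tag, root password, and port mapping are illustrative assumptions, not values prescribed by this patch:

```
docker run --name dbgpt-mysql \
  -e MYSQL_ROOT_PASSWORD=<your-password> \
  -p 3306:3306 \
  -d mysql:8
```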
+#: ../../getting_started/getting_started.md:25 954f3a282ec54b11a55ebfe1f680d1df
+msgid ""
+"We use [Chroma embedding database](https://github.com/chroma-core/chroma)"
+" as the default for our vector database, so there is no need for special "
+"installation. If you choose to connect to other databases, you can follow"
+" our tutorial for installation and configuration. For the entire "
+"installation process of DB-GPT, we use the miniconda3 virtual "
+"environment. Create a virtual environment and install the Python "
+"dependencies."
+msgstr "向量数据库我们默认使用的是Chroma内存数据库,所以无需特殊安装,如果有"
+"需要连接其他的同学,可以按照我们的教程进行安装配置。整个DB-GPT的"
+"安装过程,我们使用的是miniconda3的虚拟环境。创建虚拟环境,并安装python依赖包"
+
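A minimal sketch of the virtual-environment step, assuming miniconda3 is already installed; the environment name dbgpt_env matches the commands shown later in this series, while the Python version is an assumption:

```
conda create -n dbgpt_env python=3.10
conda activate dbgpt_env
pip install -r requirements.txt
```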
+#: ../../getting_started/getting_started.md:35 443f5f92e4cd4ce4887bae2556b605b0
+msgid "3. Run"
+msgstr "3. 运行"
+
+#: ../../getting_started/getting_started.md:36 3dab200eceda460b81a096d44de43d21
+msgid ""
+"You can refer to this document to obtain the Vicuna weights: "
+"[Vicuna](https://github.com/lm-sys/FastChat/blob/main/README.md#model-"
+"weights) ."
+msgstr "关于基础模型, 可以根据[Vicuna](https://github.com/lm-sys/FastChat/b"
+"lob/main/README.md#model-weights) 合成教程进行合成。"
+
+
+#: ../../getting_started/getting_started.md:38 b036ca6294f04bceb686187d2d8b6646
+msgid ""
+"If you have difficulty with this step, you can also directly use the "
+"model from [this link](https://huggingface.co/Tribbiani/vicuna-7b) as a "
+"replacement."
+msgstr "如果此步有困难的同学,也可以直接使用[此链接](https://huggingface.co/Tribbiani/vicuna-7b)上的模型进行替代。"
+
+#: ../../getting_started/getting_started.md:40 35537c13ff6f4bd69951c486274ca1f9
+msgid "Run server"
+msgstr "运行模型服务"
+
+#: ../../getting_started/getting_started.md:45 f7aa3668a6c94fb3a1b8346392d921f3
+msgid "Run gradio webui"
+msgstr "运行模型服务"
+
+#: ../../getting_started/getting_started.md:51 d80c908f01144e2c8a15b7f6e8e7f88d
+msgid ""
+"Notice: the webserver need to connect llmserver, so you need change the"
+" .env file. change the MODEL_SERVER = \"http://127.0.0.1:8000\" to your "
+"address. It's very important."
+msgstr "注意: 在启动Webserver之前, 需要修改.env 文件中的MODEL_SERVER"
+" = "http://127.0.0.1:8000", 将地址设置为你的服务器地址。"
+
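As a sketch of the change described above, the relevant .env line is edited so the webserver can reach your llmserver; the host below is a placeholder:

```
# .env
MODEL_SERVER=http://<your-llmserver-host>:8000
```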
diff --git a/docs/localse/zh_CN/LC_MESSAGES/getting_started/tutorials.po b/docs/localse/zh_CN/LC_MESSAGES/getting_started/tutorials.po
new file mode 100644
index 000000000..eef93efd5
--- /dev/null
+++ b/docs/localse/zh_CN/LC_MESSAGES/getting_started/tutorials.po
@@ -0,0 +1,33 @@
+# SOME DESCRIPTIVE TITLE.
+# Copyright (C) 2023, csunny
+# This file is distributed under the same license as the DB-GPT package.
+# FIRST AUTHOR , 2023.
+#
+#, fuzzy
+msgid ""
+msgstr ""
+"Project-Id-Version: DB-GPT 0.1.0\n"
+"Report-Msgid-Bugs-To: \n"
+"POT-Creation-Date: 2023-06-11 14:10+0800\n"
+"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
+"Last-Translator: FULL NAME \n"
+"Language: zh_CN\n"
+"Language-Team: zh_CN \n"
+"Plural-Forms: nplurals=1; plural=0;\n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=utf-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+"Generated-By: Babel 2.11.0\n"
+
+#: ../../getting_started/tutorials.md:1 12b03941d64f4bdf96eaaeec0147a387
+msgid "Tutorials"
+msgstr "教程"
+
+#: ../../getting_started/tutorials.md:4 b966c15b01f94a1e84d4b6142b8f4111
+msgid "This is a collection of DB-GPT tutorials on Medium."
+msgstr "这是知乎上DB-GPT教程的集合。."
+
+#: ../../getting_started/tutorials.md:6 869431aac3864180acb41b852d48d29e
+msgid "Comming soon..."
+msgstr "未完待续"
+
diff --git a/docs/localse/zh_CN/LC_MESSAGES/index.po b/docs/localse/zh_CN/LC_MESSAGES/index.po
new file mode 100644
index 000000000..19559f50e
--- /dev/null
+++ b/docs/localse/zh_CN/LC_MESSAGES/index.po
@@ -0,0 +1,272 @@
+# SOME DESCRIPTIVE TITLE.
+# Copyright (C) 2023, csunny
+# This file is distributed under the same license as the DB-GPT package.
+# FIRST AUTHOR , 2023.
+#
+#, fuzzy
+msgid ""
+msgstr ""
+"Project-Id-Version: DB-GPT 0.1.0\n"
+"Report-Msgid-Bugs-To: \n"
+"POT-Creation-Date: 2023-06-11 14:10+0800\n"
+"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
+"Last-Translator: FULL NAME \n"
+"Language: zh_CN\n"
+"Language-Team: zh_CN \n"
+"Plural-Forms: nplurals=1; plural=0;\n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=utf-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+"Generated-By: Babel 2.11.0\n"
+
+#: ../../index.rst:34 ../../index.rst:45 e3275f133efd471582d952301a6e243e
+msgid "Getting Started"
+msgstr "开始"
+
+#: ../../index.rst:56 ../../index.rst:75 86e2ce002e604304a4032aa1555b36cb
+msgid "Modules"
+msgstr "模块"
+
+#: ../../index.rst:88 ../../index.rst:104 b15c23cfcc084df9a8f8f9990e6903ac
+msgid "Use Cases"
+msgstr "示例"
+
+#: ../../index.rst:118 ../../index.rst:121 70605b76fe5348299dd5d48d8ab6a77c
+msgid "Reference"
+msgstr "参考"
+
+#: ../../index.rst:145 ../../index.rst:151 f62cf565fab64977b0efbd50e83540cc
+msgid "Resources"
+msgstr "资源"
+
+#: ../../index.rst:7 c8b3a0ca759f432095161f7baccde1c4
+msgid "Welcome to DB-GPT!"
+msgstr "欢迎来到DB-GPT中文文档"
+
+#: ../../index.rst:8 0167fea2c4df4181bc10d6e71527d005
+msgid ""
+"As large models are released and iterated upon, they are becoming "
+"increasingly intelligent. However, in the process of using large models, "
+"we face significant challenges in data security and privacy. We need to "
+"ensure that our sensitive data and environments remain completely "
+"controlled and avoid any data privacy leaks or security risks. Based on "
+"this, we have launched the DB-GPT project to build a complete private "
+"large model solution for all database-based scenarios. This solution "
+"supports local deployment, allowing it to be applied not only in "
+"independent private environments but also to be independently deployed "
+"and isolated according to business modules, ensuring that the ability of "
+"large models is absolutely private, secure, and controllable."
+msgstr "随着大型模型的发布和迭代,它们变得越来越智能。然而,在使用大型模型的过程中,"
+"我们在数据安全和隐私方面面临着重大挑战。我们需要确保我们的敏感数据和环境得到完全控制,"
+"避免任何数据隐私泄露或安全风险。基于此,我们启动了DB-GPT项目,为所有基于数据库的"
+"场景构建一个完整的私有大模型解决方案。该方案“”支持本地部署,既可应用于“独立私"
+"有环境”,又可根据业务模块进行“独立部署”和“隔离”,确保“大模型”的能力绝对"
+"私有、安全、可控。"
+
+#: ../../index.rst:10 36b847a04d624286a4942cd77821da8c
+msgid ""
+"**DB-GPT** is an experimental open-source project that uses localized GPT"
+" large models to interact with your data and environment. With this "
+"solution, you can be assured that there is no risk of data leakage, and "
+"your data is 100% private and secure."
+msgstr "DB-GPT 是一个开源的以数据库为基础的GPT实验项目,使用本地化的"
+"GPT大模型与您的数据和环境进行交互,无数据泄露风险"
+"100% 私密,100% 安全。"
+
+#: ../../index.rst:12 d20166d203934385b811740f4d5eda33
+msgid "**Features**"
+msgstr "特性"
+
+#: ../../index.rst:13 03f9de47513b4bc9a26f31e1d2d8ad60
+msgid ""
+"Currently, we have released multiple key features, which are listed below"
+" to demonstrate our current capabilities:"
+msgstr "目前我们已经发布了多种关键的特性,这里一一列举展示一下当前发布的能力。"
+
+#: ../../index.rst:15 abc51c99bc6e49d5b0105c7d95e391da
+msgid "SQL language capabilities - SQL generation - SQL diagnosis"
+msgstr "SQL语言能力 - SQL生成 - SQL诊断"
+
+#: ../../index.rst:19 e9ba27f21fd84ecf973640fa021b06b6
+msgid ""
+"Private domain Q&A and data processing - Database knowledge Q&A - Data "
+"processing"
+msgstr "私有领域问答与数据处理 - 数据库知识问答 - 数据处理"
+
+#: ../../index.rst:23 a4584012b6634553abef5a4ee6ddf509
+msgid ""
+"Plugins - Support custom plugin execution tasks and natively support the "
+"Auto-GPT plugin, such as:"
+msgstr "插件模型 - 支持自定义插件执行任务,并原生支持Auto-GPT插件,例如:"
+"* SQL自动执行,获取查询结果 * 自动爬取学习知识"
+
+#: ../../index.rst:26 b08674d7a7da4405b9388e296bc2cd57
+msgid ""
+"Unified vector storage/indexing of knowledge base - Support for "
+"unstructured data such as PDF, Markdown, CSV, and WebURL"
+msgstr "知识库统一向量存储/索引 - 非结构化数据支持包括PDF、MarkDown、CSV、WebURL"
+
+#: ../../index.rst:29 cf4bc81d46b4418b81a78242cbc7f984
+msgid ""
+"Milti LLMs Support - Supports multiple large language models, currently "
+"supporting Vicuna (7b, 13b), ChatGLM-6b (int4, int8) - TODO: codegen2, "
+"codet5p"
+msgstr "多模型支持 - 支持多种大语言模型, 当前已支持Vicuna(7b,13b), ChatGLM-6b(int4, int8)"
+Guanaco, Goriila, Falcon等系列模型"
+
+#: ../../index.rst:35 681ae172eea64b718e0f6fc734d041b1
+msgid ""
+"How to get started using DB-GPT to interact with your data and "
+"environment."
+msgstr "开始使用DB-GPT与您的数据环境进行交互。"
+
+#: ../../index.rst:36 87f507e0c27a4a38ba2a5c19e804549f
+msgid "`Quickstart Guid <./getting_started/getting_started.html>`_"
+msgstr "`使用指南<./getting_started/getting_started.html>`_"
+
+#: ../../index.rst:38 ab35a5cd96c548ecb0c285fd822f652a
+msgid "Concepts and terminology"
+msgstr "相关概念"
+
+#: ../../index.rst:40 3fbd5c96df084ef889442a0b89ad6c05
+msgid "`Concepts and terminology <./getting_started/concepts.html>`_"
+msgstr "相关概念 <./getting_started/concepts.html>`_"
+
+#: ../../index.rst:42 6d9a0d727ce14edfbdcf678c6fbba76b
+msgid "Coming soon..."
+msgstr "未完待续。。。"
+
+#: ../../index.rst:44 58cdc41dce264a3e83de565501298010
+msgid "`Tutorials <.getting_started/tutorials.html>`_"
+msgstr "`教程 <.getting_started/tutorials.html>`_"
+
+#: ../../index.rst:58 20d67b324c23468e8f2cac6d9100b9f5
+msgid ""
+"These modules are the core abstractions with which we can interact with "
+"data and environment smoothly."
+msgstr "这些模块是我们可以与数据和环境顺利地进行交互的核心组成。"
+
+
+#: ../../index.rst:59 45a14052370f4860a72d8e831269d184
+msgid ""
+"It's very important for DB-GPT, DB-GPT also provide standard, extendable "
+"interfaces."
+msgstr "DB-GPT还提供了标准的、可扩展的接口。"
+
+#: ../../index.rst:61 7c78c2ddc4104a8b9688472072c3225c
+msgid ""
+"The docs for each module contain quickstart examples, how to guides, "
+"reference docs, and conceptual guides."
+msgstr "每个模块的文档都包含快速入门的例子、操作指南、参考文档和相关概念等内容。"
+
+#: ../../index.rst:63 4bcc203282434ca9b77d20c4115a646a
+msgid "The modules are as follows"
+msgstr "组成模块如下:"
+
+#: ../../index.rst:65 c87f13e106b5443a824df5ca85331df4
+msgid ""
+"`LLMs <./modules/llms.html>`_: Supported multi models management and "
+"integrations."
+msgstr "`LLMs <./modules/llms.html>`_:基于FastChat提供大模型的运行环境。支持多模型管理和集成。 "
+
+#: ../../index.rst:67 3447e10b61804b48a786ee12beaaedfd
+msgid ""
+"`Prompts <./modules/prompts.html>`_: Prompt management, optimization, and"
+" serialization for multi database."
+msgstr "`Prompt自动生成与优化 <./modules/prompts.html>`_: 自动化生成高质量的Prompt"
+" ,并进行优化,提高系统的响应效率"
+
+#: ../../index.rst:69 a3182673127141888fdc13560e7dcfb3
+msgid "`Plugins <./modules/plugins.html>`_: Plugins management, scheduler."
+msgstr "`Agent与插件: <./modules/plugins.html>`_:提供Agent和插件机制,使得用户可以自定义并增强系统的行为。"
+
+#: ../../index.rst:71 66abfffcb9c0466f9a3988ecfb19fc9e
+msgid ""
+"`Knownledge <./modules/knownledge.html>`_: Knownledge management, "
+"embedding, and search."
+msgstr "`知识库能力: <./modules/knownledge.html>`_: 支持私域知识库问答能力, "
+
+#: ../../index.rst:73 1027a33646614790a4d88f29285ab0fd
+msgid ""
+"`Connections <./modules/connections.html>`_: Supported multi databases "
+"connection. management connections and interact with this."
+msgstr "`连接模块 <./modules/connections.html>`_: 用于连接不同的模块和数据源,实现数据的流转和交互 "
+
+
+#: ../../index.rst:90 53b58e6e531841878fbc8616841d5e9e
+msgid "Best Practices and built-in implementations for common DB-GPT use cases:"
+msgstr "DB-GPT用例的最佳实践和内置方法:"
+
+#: ../../index.rst:92 a5c664233fe04417ba9bb0415fd686d7
+msgid ""
+"`Sql generation and diagnosis "
+"<./use_cases/sql_generation_and_diagnosis.html>`_: SQL generation and "
+"diagnosis."
+msgstr "`Sql生成和诊断 "
+"<./use_cases/sql_generation_and_diagnosis.html>`_: Sql生成和诊断。"
+
+
+#: ../../index.rst:94 04c63b56e77b45e5b4e7bd1db45ea10f
+msgid ""
+"`knownledge Based QA <./use_cases/knownledge_based_qa.html>`_: A "
+"important scene for user to chat with database documents, codes, bugs and"
+" schemas."
+msgstr "`知识库问答 <./use_cases/knownledge_based_qa.html>`_: "
+"用户与数据库文档、代码和bug聊天的重要场景"。
+
+#: ../../index.rst:96 415e2b9f640341a084f893781e2b3ec0
+msgid ""
+"`Chatbots <./use_cases/chatbots.html>`_: Language model love to chat, use"
+" multi models to chat."
+msgstr "`聊天机器人 <./use_cases/chatbots.html>`_: 使用多模型进行对话"
+
+#: ../../index.rst:98 59a7ec39d2034fb794a9272d55607122
+msgid ""
+"`Querying Database Data <./use_cases/query_database_data.html>`_: Query "
+"and Analysis data from databases and give charts."
+msgstr "`查询数据库数据 <./use_cases/query_database_data.html>`_:"
+"从数据库中查询和分析数据并给出图表。"
+
+#: ../../index.rst:100 3bd098eda9044bd39e4bba28a82f4195
+msgid ""
+"`Interacting with apis <./use_cases/interacting_with_api.html>`_: "
+"Interact with apis, such as create a table, deploy a database cluster, "
+"create a database and so on."
+msgstr "`API交互 <./use_cases/interacting_with_api.html>`_: "
+"与API交互,例如创建表、部署数据库集群、创建数据库等。"
+
+
+#: ../../index.rst:102 66daab899d7b4e528eda70779ab79676
+msgid ""
+"`Tool use with plugins <./use_cases/tool_use_with_plugin>`_: According to"
+" Plugin use tools to manage databases autonomoly."
+msgstr "`插件工具 <./use_cases/tool_use_with_plugin>`_:"
+" 根据插件使用工具自主管理数据库。"
+
+#: ../../index.rst:119 e5a84e2dc87d4a06aa77ef4d77fb7bcb
+msgid ""
+"Full documentation on all methods, classes, installation methods, and "
+"integration setups for DB-GPT."
+msgstr "关于DB-GPT的所有方法、类、安装方法和集成设置的完整文档。"
+
+#: ../../index.rst:130 7c51e39ad3824c5f8575390adbcba738
+msgid "Ecosystem"
+msgstr "环境系统"
+
+#: ../../index.rst:132 b59e9ddba86945c1bebe395b2863174c
+msgid "Guides for how other companies/products can be used with DB-GPT"
+msgstr "其他公司/产品如何与DB-GPT一起使用的方法指南"
+
+#: ../../index.rst:147 992bf68cc48a425696c02429d39f86e3
+msgid ""
+"Additional resources we think may be useful as you develop your "
+"application!"
+msgstr "“我们认为在您开发应用程序时可能有用的其他资源!”"
+
+#: ../../index.rst:149 d99277006b05438c8d2e8088242f239c
+msgid ""
+"`Discord `_: if your have some "
+"problem or ideas, you can talk from discord."
+msgstr "`Discord `_:"
+"如果您有任何问题,可以到discord中进行交流。"
+
diff --git a/docs/localse/zh_CN/LC_MESSAGES/modules/connections.po b/docs/localse/zh_CN/LC_MESSAGES/modules/connections.po
new file mode 100644
index 000000000..19b52157d
--- /dev/null
+++ b/docs/localse/zh_CN/LC_MESSAGES/modules/connections.po
@@ -0,0 +1,34 @@
+# SOME DESCRIPTIVE TITLE.
+# Copyright (C) 2023, csunny
+# This file is distributed under the same license as the DB-GPT package.
+# FIRST AUTHOR , 2023.
+#
+#, fuzzy
+msgid ""
+msgstr ""
+"Project-Id-Version: DB-GPT 0.1.0\n"
+"Report-Msgid-Bugs-To: \n"
+"POT-Creation-Date: 2023-06-11 14:10+0800\n"
+"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
+"Last-Translator: FULL NAME \n"
+"Language: zh_CN\n"
+"Language-Team: zh_CN \n"
+"Plural-Forms: nplurals=1; plural=0;\n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=utf-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+"Generated-By: Babel 2.11.0\n"
+
+#: ../../modules/connections.md:1 21de23e95a6c4405a242fb9a0f4e5f2b
+msgid "Connections"
+msgstr "连接模块"
+
+#: ../../modules/connections.md:3 0f09b3be20cd409f92c2ba819dbf45eb
+msgid ""
+"In order to interact more conveniently with users' private environments, "
+"the project has designed a connection module, which can support "
+"connection to databases, Excel, knowledge bases, and other environments "
+"to achieve information and data exchange."
+msgstr "为了更方便地与用户的私有环境进行交互,项目设计了一个连接模块,可以支持"
+"与数据库、Excel、知识库等环境的连接,实现信息和数据的交换。"
+
diff --git a/docs/localse/zh_CN/LC_MESSAGES/modules/index.po b/docs/localse/zh_CN/LC_MESSAGES/modules/index.po
new file mode 100644
index 000000000..fa030528f
--- /dev/null
+++ b/docs/localse/zh_CN/LC_MESSAGES/modules/index.po
@@ -0,0 +1,38 @@
+# SOME DESCRIPTIVE TITLE.
+# Copyright (C) 2023, csunny
+# This file is distributed under the same license as the DB-GPT package.
+# FIRST AUTHOR , 2023.
+#
+#, fuzzy
+msgid ""
+msgstr ""
+"Project-Id-Version: DB-GPT 0.1.0\n"
+"Report-Msgid-Bugs-To: \n"
+"POT-Creation-Date: 2023-06-11 14:10+0800\n"
+"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
+"Last-Translator: FULL NAME \n"
+"Language: zh_CN\n"
+"Language-Team: zh_CN \n"
+"Plural-Forms: nplurals=1; plural=0;\n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=utf-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+"Generated-By: Babel 2.11.0\n"
+
+#: ../../modules/index.md:1 a7cda547b08244fdad5efc00b164432d
+msgid "Vector storage and indexing"
+msgstr "向量存储和索引"
+
+#: ../../modules/index.md:3 fcbfbe3dda3d47d8a8ca2fefb1750b9a
+msgid ""
+"In order to facilitate the management of knowledge after vectorization, "
+"we have built-in multiple vector storage engines, from memory-based "
+"Chroma to distributed Milvus. Users can choose different storage engines "
+"according to their own scenario needs. The storage of knowledge vectors "
+"is the cornerstone of AI capability enhancement. As the intermediate "
+"language for interaction between humans and large language models, "
+"vectors play a very important role in this project."
+msgstr "为了便于知识向量化后的管理,我们内置了多个向量存储引擎,从基于内存的Chroma"
+"到分布式的Milvus。用户可以根据自己的场景需求选择不同的存储引擎。知识向量的存储是增"
+"强人工智能能力的基石。作为人类和大型语言模型之间交互的中间语言,向量在这个项目中扮演"
+"着非常重要的角色。"
diff --git a/docs/localse/zh_CN/LC_MESSAGES/modules/knownledge.po b/docs/localse/zh_CN/LC_MESSAGES/modules/knownledge.po
new file mode 100644
index 000000000..fd37a7565
--- /dev/null
+++ b/docs/localse/zh_CN/LC_MESSAGES/modules/knownledge.po
@@ -0,0 +1,89 @@
+# SOME DESCRIPTIVE TITLE.
+# Copyright (C) 2023, csunny
+# This file is distributed under the same license as the DB-GPT package.
+# FIRST AUTHOR , 2023.
+#
+#, fuzzy
+msgid ""
+msgstr ""
+"Project-Id-Version: DB-GPT 0.1.0\n"
+"Report-Msgid-Bugs-To: \n"
+"POT-Creation-Date: 2023-06-11 14:10+0800\n"
+"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
+"Last-Translator: FULL NAME \n"
+"Language: zh_CN\n"
+"Language-Team: zh_CN \n"
+"Plural-Forms: nplurals=1; plural=0;\n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=utf-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+"Generated-By: Babel 2.11.0\n"
+
+#: ../../modules/knownledge.md:1 ac3aa55568c0414a821a42aeed509ab2
+msgid "Knownledge"
+msgstr "知识"
+
+#: ../../modules/knownledge.md:3 1d57e3d2d790437ea54730477c67fdfb
+msgid ""
+"As the knowledge base is currently the most significant user demand "
+"scenario, we natively support the construction and processing of "
+"knowledge bases. At the same time, we also provide multiple knowledge "
+"base management strategies in this project, such as:"
+msgstr "由于知识库是当前用户需求最显著的场景,我们原生支持知识库的构建和处理。"
+"同时,我们还在本项目中提供了多种知识库管理策略,如:"
+
+#: ../../modules/knownledge.md:4 784708fc19334742b73549d92a21ed32
+msgid "Default built-in knowledge base"
+msgstr "默认内置知识库"
+
+#: ../../modules/knownledge.md:5 c65ccfabe79348c09e6fc13a10774ffd
+msgid "Custom addition of knowledge bases"
+msgstr "自定义新增知识库"
+
+#: ../../modules/knownledge.md:6 fc8fded3e3634edfbe6001d9ea1add90
+msgid ""
+"Various usage scenarios such as constructing knowledge bases through "
+"plugin capabilities and web crawling. Users only need to organize the "
+"knowledge documents, and they can use our existing capabilities to build "
+"the knowledge base required for the large model."
+msgstr "各种使用场景,例如通过插件功能和爬虫构建知识库。用户只需要组织知识文档,"
+"并且他们可以使用我们现有的功能来构建大型模型所需的知识库。"
+
+#: ../../modules/knownledge.md:9 2fa8ae0edeef4380ab60c43754d93c93
+msgid "Create your own knowledge repository"
+msgstr "创建你自己的知识库"
+
+#: ../../modules/knownledge.md:11 13dc4cea806e42c4887c45bbd84fb063
+msgid ""
+"1.Place personal knowledge files or folders in the pilot/datasets "
+"directory."
+msgstr "1.将个人知识文件或文件夹放在pilot/datasets目录中。"
+
+#: ../../modules/knownledge.md:13 8dbf51249c9d47749e3fedbf9886479b
+msgid ""
+"2.Update your .env, set your vector store type, VECTOR_STORE_TYPE=Chroma "
+"(now only support Chroma and Milvus, if you set Milvus, please set "
+"MILVUS_URL and MILVUS_PORT)"
+msgstr "2.更新你的.env,设置你的向量存储类型,VECTOR_STORE_TYPE=Chroma(现在只支持"
+"Chroma和Milvus,如果你设置了Milvus,请设置MILVUS_URL和MILVUS_PORT)"
+
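A hedged sketch of the .env settings this entry refers to; the Milvus host and port values are placeholders (19530 is the common Milvus default, not something this patch specifies):

```
# .env
VECTOR_STORE_TYPE=Chroma      # default; switch to Milvus if needed
#VECTOR_STORE_TYPE=Milvus
#MILVUS_URL=127.0.0.1
#MILVUS_PORT=19530
```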
+#: ../../modules/knownledge.md:16 e03cce8ad3b14100b8bb22dd98ea49ae
+msgid "2.Run the knowledge repository script in the tools directory."
+msgstr "2.在tools目录执行知识入库脚本"
+
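The invocation is roughly the following; note that the path differs between places in this series (tool/knowledge_init.py elsewhere in these docs, tools/knowlege_init.py in a later patch), so check the repository for the exact file name:

```
python tools/knowledge_init.py
```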
+#: ../../modules/knownledge.md:26 a2919580cc324820b1217e31c8b22203
+msgid ""
+"3.Add the knowledge repository in the interface by entering the name of "
+"your knowledge repository (if not specified, enter \"default\") so you "
+"can use it for Q&A based on your knowledge base."
+msgstr "如果选择新增知识库,在界面上新增知识库输入你的知识库名"
+
+#: ../../modules/knownledge.md:28 236317becbb042f2acbf66c499a3b984
+msgid ""
+"Note that the default vector model used is text2vec-large-chinese (which "
+"is a large model, so if your personal computer configuration is not "
+"enough, it is recommended to use text2vec-base-chinese). Therefore, "
+"ensure that you download the model and place it in the models directory."
+msgstr "注意,这里默认向量模型是text2vec-large-chinese(模型比较大,如果个人电脑"
+"配置不够建议采用text2vec-base-chinese),因此确保需要将模型download下来放到models目录中。"
+
diff --git a/docs/localse/zh_CN/LC_MESSAGES/modules/llms.po b/docs/localse/zh_CN/LC_MESSAGES/modules/llms.po
new file mode 100644
index 000000000..08fd5d984
--- /dev/null
+++ b/docs/localse/zh_CN/LC_MESSAGES/modules/llms.po
@@ -0,0 +1,56 @@
+# SOME DESCRIPTIVE TITLE.
+# Copyright (C) 2023, csunny
+# This file is distributed under the same license as the DB-GPT package.
+# FIRST AUTHOR , 2023.
+#
+#, fuzzy
+msgid ""
+msgstr ""
+"Project-Id-Version: DB-GPT 0.1.0\n"
+"Report-Msgid-Bugs-To: \n"
+"POT-Creation-Date: 2023-06-11 14:10+0800\n"
+"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
+"Last-Translator: FULL NAME \n"
+"Language: zh_CN\n"
+"Language-Team: zh_CN \n"
+"Plural-Forms: nplurals=1; plural=0;\n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=utf-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+"Generated-By: Babel 2.11.0\n"
+
+#: ../../modules/llms.md:1 9c05a511436b4a408e2d1acd2f2568e7
+msgid "LLMs"
+msgstr "大语言模型"
+
+#: ../../modules/llms.md:3 c6549cbde17e42e596470a537286cedb
+#, python-format
+msgid ""
+"In the underlying large model integration, we have designed an open "
+"interface that supports integration with various large models. At the "
+"same time, we have a very strict control and evaluation mechanism for the"
+" effectiveness of the integrated models. In terms of accuracy, the "
+"integrated models need to align with the capability of ChatGPT at a level"
+" of 85% or higher. We use higher standards to select models, hoping to "
+"save users the cumbersome testing and evaluation process in the process "
+"of use."
+msgstr "在底层大模型接入中,我们设计了开放的接口,支持对接多种大模型。同时对于接入模型的效果,"
+"我们有非常严格的把控与评审机制。对大模型能力上与ChatGPT对比,在准确率上需要满足85%"
+"以上的能力对齐。我们用更高的标准筛选模型,是期望在用户使用过程中,可以省去前面繁琐的测试评估环节。"
+
+#: ../../modules/llms.md:5 1b18ef91924442f7ab7a117aec6122d5
+msgid "Multi LLMs Usage"
+msgstr "多模型使用"
+
+#: ../../modules/llms.md:6 b14256f1768d45ef929be664b8afb31e
+msgid ""
+"To use multiple models, modify the LLM_MODEL parameter in the .env "
+"configuration file to switch between the models."
+msgstr "如果要使用不同的模型,请修改.env配置文件中的LLM MODEL参数以在模型之间切换。"
+
+#: ../../modules/llms.md:8 42cbe90a1a524d8381a0a743ef1a927e
+msgid ""
+"Notice: you can create .env file from .env.template, just use command "
+"like this:"
+msgstr "注意:你可以从 .env.template 创建 .env 文件。只需使用如下命令:"
+
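A sketch of the command referenced above, together with the model-switching line it enables; vicuna-13b is only an example value taken from the clone commands later in this series:

```
cp .env.template .env
# then edit .env, e.g.
# LLM_MODEL=vicuna-13b
```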
diff --git a/docs/localse/zh_CN/LC_MESSAGES/modules/plugins.po b/docs/localse/zh_CN/LC_MESSAGES/modules/plugins.po
new file mode 100644
index 000000000..483761bac
--- /dev/null
+++ b/docs/localse/zh_CN/LC_MESSAGES/modules/plugins.po
@@ -0,0 +1,37 @@
+# SOME DESCRIPTIVE TITLE.
+# Copyright (C) 2023, csunny
+# This file is distributed under the same license as the DB-GPT package.
+# FIRST AUTHOR , 2023.
+#
+#, fuzzy
+msgid ""
+msgstr ""
+"Project-Id-Version: DB-GPT 0.1.0\n"
+"Report-Msgid-Bugs-To: \n"
+"POT-Creation-Date: 2023-06-11 14:10+0800\n"
+"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
+"Last-Translator: FULL NAME \n"
+"Language: zh_CN\n"
+"Language-Team: zh_CN \n"
+"Plural-Forms: nplurals=1; plural=0;\n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=utf-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+"Generated-By: Babel 2.11.0\n"
+
+#: ../../modules/plugins.md:1 48f1b7ff4099485ba3853c373e64273f
+msgid "Plugins"
+msgstr "插件"
+
+#: ../../modules/plugins.md:3 3d94b3250511468d80aa29359f01128d
+msgid ""
+"The ability of Agent and Plugin is the core of whether large models can "
+"be automated. In this project, we natively support the plugin mode, and "
+"large models can automatically achieve their goals. At the same time, in "
+"order to give full play to the advantages of the community, the plugins "
+"used in this project natively support the Auto-GPT plugin ecology, that "
+"is, Auto-GPT plugins can directly run in our project."
+msgstr "Agent与插件能力是大模型能否自动化的核心,在本的项目中,原生支持插件模式,"
+"大模型可以自动化完成目标。 同时为了充分发挥社区的优势,本项目中所用的插件原生支持"
+"Auto-GPT插件生态,即Auto-GPT的插件可以直接在我们的项目中运行。"
+
diff --git a/docs/localse/zh_CN/LC_MESSAGES/modules/prompts.po b/docs/localse/zh_CN/LC_MESSAGES/modules/prompts.po
new file mode 100644
index 000000000..b2229376a
--- /dev/null
+++ b/docs/localse/zh_CN/LC_MESSAGES/modules/prompts.po
@@ -0,0 +1,37 @@
+# SOME DESCRIPTIVE TITLE.
+# Copyright (C) 2023, csunny
+# This file is distributed under the same license as the DB-GPT package.
+# FIRST AUTHOR , 2023.
+#
+#, fuzzy
+msgid ""
+msgstr ""
+"Project-Id-Version: DB-GPT 0.1.0\n"
+"Report-Msgid-Bugs-To: \n"
+"POT-Creation-Date: 2023-06-11 14:10+0800\n"
+"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
+"Last-Translator: FULL NAME \n"
+"Language: zh_CN\n"
+"Language-Team: zh_CN \n"
+"Plural-Forms: nplurals=1; plural=0;\n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=utf-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+"Generated-By: Babel 2.11.0\n"
+
+#: ../../modules/prompts.md:1 bb9583334e6948b98b59126234ae045f
+msgid "Prompts"
+msgstr ""
+
+#: ../../modules/prompts.md:3 e6f5129e260c4a739a40115fff82850f
+msgid ""
+"Prompt is a very important part of the interaction between the large "
+"model and the user, and to a certain extent, it determines the quality "
+"and accuracy of the answer generated by the large model. In this project,"
+" we will automatically optimize the corresponding prompt according to "
+"user input and usage scenarios, making it easier and more efficient for "
+"users to use large language models."
+msgstr "Prompt是与大模型交互过程中非常重要的部分,一定程度上Prompt决定了"
+"大模型生成答案的质量与准确性,在本的项目中,我们会根据用户输入与"
+"使用场景,自动优化对应的Prompt,让用户使用大语言模型变得更简单、更高效。"
+
diff --git a/docs/localse/zh_CN/LC_MESSAGES/modules/server.po b/docs/localse/zh_CN/LC_MESSAGES/modules/server.po
new file mode 100644
index 000000000..b4bd0665b
--- /dev/null
+++ b/docs/localse/zh_CN/LC_MESSAGES/modules/server.po
@@ -0,0 +1,32 @@
+# SOME DESCRIPTIVE TITLE.
+# Copyright (C) 2023, csunny
+# This file is distributed under the same license as the DB-GPT package.
+# FIRST AUTHOR , 2023.
+#
+#, fuzzy
+msgid ""
+msgstr ""
+"Project-Id-Version: DB-GPT 0.1.0\n"
+"Report-Msgid-Bugs-To: \n"
+"POT-Creation-Date: 2023-06-11 14:10+0800\n"
+"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
+"Last-Translator: FULL NAME \n"
+"Language: zh_CN\n"
+"Language-Team: zh_CN \n"
+"Plural-Forms: nplurals=1; plural=0;\n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=utf-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+"Generated-By: Babel 2.11.0\n"
+
+#: ../../modules/server.md:1 e882c271ebc441bca79808bc00f2bc24
+msgid "Server"
+msgstr ""
+
+#: ../../modules/server.md:3 325cc3afd7d04e568c912bbf7f11788d
+msgid ""
+"TODO: In terms of terminal display, we will provide a multi-platform "
+"product interface, including PC, mobile phone, command line, Slack and "
+"other platforms."
+msgstr "TODO: 在终端展示上,我们将提供多端产品界面。包括PC、手机、命令行、Slack等多种模式。"
+
diff --git a/docs/localse/zh_CN/LC_MESSAGES/reference.po b/docs/localse/zh_CN/LC_MESSAGES/reference.po
new file mode 100644
index 000000000..002cab91a
--- /dev/null
+++ b/docs/localse/zh_CN/LC_MESSAGES/reference.po
@@ -0,0 +1,25 @@
+# SOME DESCRIPTIVE TITLE.
+# Copyright (C) 2023, csunny
+# This file is distributed under the same license as the DB-GPT package.
+# FIRST AUTHOR , 2023.
+#
+#, fuzzy
+msgid ""
+msgstr ""
+"Project-Id-Version: DB-GPT 0.1.0\n"
+"Report-Msgid-Bugs-To: \n"
+"POT-Creation-Date: 2023-06-11 14:10+0800\n"
+"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
+"Last-Translator: FULL NAME \n"
+"Language: zh_CN\n"
+"Language-Team: zh_CN \n"
+"Plural-Forms: nplurals=1; plural=0;\n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=utf-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+"Generated-By: Babel 2.11.0\n"
+
+#: ../../reference.md:1 83c827fb051c40d8b16f704752c9581b
+msgid "Reference"
+msgstr "参考文献"
+
diff --git a/docs/localse/zh_CN/LC_MESSAGES/use_cases/chatbots.po b/docs/localse/zh_CN/LC_MESSAGES/use_cases/chatbots.po
new file mode 100644
index 000000000..2d6845d8c
--- /dev/null
+++ b/docs/localse/zh_CN/LC_MESSAGES/use_cases/chatbots.po
@@ -0,0 +1,25 @@
+# SOME DESCRIPTIVE TITLE.
+# Copyright (C) 2023, csunny
+# This file is distributed under the same license as the DB-GPT package.
+# FIRST AUTHOR , 2023.
+#
+#, fuzzy
+msgid ""
+msgstr ""
+"Project-Id-Version: DB-GPT 0.1.0\n"
+"Report-Msgid-Bugs-To: \n"
+"POT-Creation-Date: 2023-06-11 14:10+0800\n"
+"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
+"Last-Translator: FULL NAME \n"
+"Language: zh_CN\n"
+"Language-Team: zh_CN \n"
+"Plural-Forms: nplurals=1; plural=0;\n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=utf-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+"Generated-By: Babel 2.11.0\n"
+
+#: ../../use_cases/chatbots.md:1 e599819098be40759193233cc476f26a
+msgid "Chatbot"
+msgstr "聊天机器人"
+
diff --git a/docs/localse/zh_CN/LC_MESSAGES/use_cases/interacting_with_api.po b/docs/localse/zh_CN/LC_MESSAGES/use_cases/interacting_with_api.po
new file mode 100644
index 000000000..8c69c7959
--- /dev/null
+++ b/docs/localse/zh_CN/LC_MESSAGES/use_cases/interacting_with_api.po
@@ -0,0 +1,25 @@
+# SOME DESCRIPTIVE TITLE.
+# Copyright (C) 2023, csunny
+# This file is distributed under the same license as the DB-GPT package.
+# FIRST AUTHOR , 2023.
+#
+#, fuzzy
+msgid ""
+msgstr ""
+"Project-Id-Version: DB-GPT 0.1.0\n"
+"Report-Msgid-Bugs-To: \n"
+"POT-Creation-Date: 2023-06-11 14:10+0800\n"
+"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
+"Last-Translator: FULL NAME \n"
+"Language: zh_CN\n"
+"Language-Team: zh_CN \n"
+"Plural-Forms: nplurals=1; plural=0;\n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=utf-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+"Generated-By: Babel 2.11.0\n"
+
+#: ../../use_cases/interacting_with_api.md:1 2dc3e9c958e24aca90af1b0520d416b4
+msgid "Interacting with api"
+msgstr "API交互"
+
diff --git a/docs/localse/zh_CN/LC_MESSAGES/use_cases/knownledge_based_qa.po b/docs/localse/zh_CN/LC_MESSAGES/use_cases/knownledge_based_qa.po
new file mode 100644
index 000000000..26473de7f
--- /dev/null
+++ b/docs/localse/zh_CN/LC_MESSAGES/use_cases/knownledge_based_qa.po
@@ -0,0 +1,57 @@
+# SOME DESCRIPTIVE TITLE.
+# Copyright (C) 2023, csunny
+# This file is distributed under the same license as the DB-GPT package.
+# FIRST AUTHOR , 2023.
+#
+#, fuzzy
+msgid ""
+msgstr ""
+"Project-Id-Version: DB-GPT 0.1.0\n"
+"Report-Msgid-Bugs-To: \n"
+"POT-Creation-Date: 2023-06-11 14:10+0800\n"
+"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
+"Last-Translator: FULL NAME \n"
+"Language: zh_CN\n"
+"Language-Team: zh_CN \n"
+"Plural-Forms: nplurals=1; plural=0;\n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=utf-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+"Generated-By: Babel 2.11.0\n"
+
+#: ../../use_cases/knownledge_based_qa.md:1 a03c7a5aa5cc4a3e9bc7bd3734d47176
+msgid "Knownledge based qa"
+msgstr "知识问答"
+
+#: ../../use_cases/knownledge_based_qa.md:3 37607733852c4ade97c80fbcca66d573
+msgid ""
+"Chat with your own knowledge is a very interesting thing. In the usage "
+"scenarios of this chapter, we will introduce how to build your own "
+"knowledge base through the knowledge base API. Firstly, building a "
+"knowledge store can currently be initialized by executing \"python "
+"tool/knowledge_init.py\" to initialize the content of your own knowledge "
+"base, which was introduced in the previous knowledge base module. Of "
+"course, you can also call our provided knowledge embedding API to store "
+"knowledge."
+msgstr "用自己的知识聊天是一件很有趣的事情。在本章的使用场景中,"
+"我们将介绍如何通过知识库API构建自己的知识库。首先,"
+"构建知识存储目前可以通过执行“python tool/knowledge_init.py”"
+"来初始化您自己的知识库的内容,这在前面的知识库模块中已经介绍过了"
+"。当然,你也可以调用我们提供的知识嵌入API来存储知识。"
+
+#: ../../use_cases/knownledge_based_qa.md:6 ea5ad6cec29d49228c03d57d255c42fe
+msgid "We currently support four document formats: txt, pdf, url, and md."
+msgstr "“我们目前支持四种文件格式: txt, pdf, url, 和md。"
+
+#: ../../use_cases/knownledge_based_qa.md:20 01908d4b18b345908004a251462d42b3
+msgid ""
+"Now we currently support vector databases: Chroma (default) and Milvus. "
+"You can switch between them by modifying the \"VECTOR_STORE_TYPE\" field "
+"in the .env file."
+msgstr "“我们目前支持向量数据库:Chroma(默认)和Milvus。"
+"你可以通过修改.env文件中的“VECTOR_STORE_TYPE”参数在它们之间切换。"
+
+#: ../../use_cases/knownledge_based_qa.md:31 f37d80faa3f84c8cb176a59f4ff8140c
+msgid "Below is an example of using the knowledge base API to query knowledge:"
+msgstr "下面是一个使用知识库API进行查询的例子:"
+
diff --git a/docs/localse/zh_CN/LC_MESSAGES/use_cases/query_database_data.po b/docs/localse/zh_CN/LC_MESSAGES/use_cases/query_database_data.po
new file mode 100644
index 000000000..f33f89f97
--- /dev/null
+++ b/docs/localse/zh_CN/LC_MESSAGES/use_cases/query_database_data.po
@@ -0,0 +1,25 @@
+# SOME DESCRIPTIVE TITLE.
+# Copyright (C) 2023, csunny
+# This file is distributed under the same license as the DB-GPT package.
+# FIRST AUTHOR , 2023.
+#
+#, fuzzy
+msgid ""
+msgstr ""
+"Project-Id-Version: DB-GPT 0.1.0\n"
+"Report-Msgid-Bugs-To: \n"
+"POT-Creation-Date: 2023-06-11 14:10+0800\n"
+"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
+"Last-Translator: FULL NAME \n"
+"Language: zh_CN\n"
+"Language-Team: zh_CN \n"
+"Plural-Forms: nplurals=1; plural=0;\n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=utf-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+"Generated-By: Babel 2.11.0\n"
+
+#: ../../use_cases/query_database_data.md:1 4a246f7052db497d990d3e65236b7c52
+msgid "Query database data"
+msgstr "查询数据库数据"
+
diff --git a/docs/localse/zh_CN/LC_MESSAGES/use_cases/sql_generation_and_diagnosis.po b/docs/localse/zh_CN/LC_MESSAGES/use_cases/sql_generation_and_diagnosis.po
new file mode 100644
index 000000000..b7ab23dfc
--- /dev/null
+++ b/docs/localse/zh_CN/LC_MESSAGES/use_cases/sql_generation_and_diagnosis.po
@@ -0,0 +1,26 @@
+# SOME DESCRIPTIVE TITLE.
+# Copyright (C) 2023, csunny
+# This file is distributed under the same license as the DB-GPT package.
+# FIRST AUTHOR , 2023.
+#
+#, fuzzy
+msgid ""
+msgstr ""
+"Project-Id-Version: DB-GPT 0.1.0\n"
+"Report-Msgid-Bugs-To: \n"
+"POT-Creation-Date: 2023-06-11 14:10+0800\n"
+"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
+"Last-Translator: FULL NAME \n"
+"Language: zh_CN\n"
+"Language-Team: zh_CN \n"
+"Plural-Forms: nplurals=1; plural=0;\n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=utf-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+"Generated-By: Babel 2.11.0\n"
+
+#: ../../use_cases/sql_generation_and_diagnosis.md:1
+#: 8900f8d9f3034b20a96df1d5c611eaa1
+msgid "SQL generation and diagnosis"
+msgstr "SQL生成和诊断"
+
diff --git a/docs/localse/zh_CN/LC_MESSAGES/use_cases/tool_use_with_plugin.po b/docs/localse/zh_CN/LC_MESSAGES/use_cases/tool_use_with_plugin.po
new file mode 100644
index 000000000..73293771d
--- /dev/null
+++ b/docs/localse/zh_CN/LC_MESSAGES/use_cases/tool_use_with_plugin.po
@@ -0,0 +1,25 @@
+# SOME DESCRIPTIVE TITLE.
+# Copyright (C) 2023, csunny
+# This file is distributed under the same license as the DB-GPT package.
+# FIRST AUTHOR , 2023.
+#
+#, fuzzy
+msgid ""
+msgstr ""
+"Project-Id-Version: DB-GPT 0.1.0\n"
+"Report-Msgid-Bugs-To: \n"
+"POT-Creation-Date: 2023-06-11 14:10+0800\n"
+"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
+"Last-Translator: FULL NAME \n"
+"Language: zh_CN\n"
+"Language-Team: zh_CN \n"
+"Plural-Forms: nplurals=1; plural=0;\n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=utf-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+"Generated-By: Babel 2.11.0\n"
+
+#: ../../use_cases/tool_use_with_plugin.md:1 2bd7d79a16a548c4a3872a12c436aa4f
+msgid "Tool use with plugin"
+msgstr "插件工具"
+
From e44384fc339922302d76ffff8bd72163f93a0365 Mon Sep 17 00:00:00 2001
From: csunny
Date: Sun, 11 Jun 2023 20:27:30 +0800
Subject: [PATCH 02/14] docs: add support for zh
---
docs/conf.py | 6 ++++++
docs/{localse => locales}/zh_CN/LC_MESSAGES/ecosystem.po | 0
.../zh_CN/LC_MESSAGES/getting_started/concepts.po | 0
.../zh_CN/LC_MESSAGES/getting_started/getting_started.po | 0
.../zh_CN/LC_MESSAGES/getting_started/tutorials.po | 0
docs/{localse => locales}/zh_CN/LC_MESSAGES/index.po | 0
.../zh_CN/LC_MESSAGES/modules/connections.po | 0
.../{localse => locales}/zh_CN/LC_MESSAGES/modules/index.po | 0
.../zh_CN/LC_MESSAGES/modules/knownledge.po | 0
docs/{localse => locales}/zh_CN/LC_MESSAGES/modules/llms.po | 0
.../zh_CN/LC_MESSAGES/modules/plugins.po | 0
.../zh_CN/LC_MESSAGES/modules/prompts.po | 0
.../zh_CN/LC_MESSAGES/modules/server.po | 0
docs/{localse => locales}/zh_CN/LC_MESSAGES/reference.po | 0
.../zh_CN/LC_MESSAGES/use_cases/chatbots.po | 0
.../zh_CN/LC_MESSAGES/use_cases/interacting_with_api.po | 0
.../zh_CN/LC_MESSAGES/use_cases/knownledge_based_qa.po | 0
.../zh_CN/LC_MESSAGES/use_cases/query_database_data.po | 0
.../LC_MESSAGES/use_cases/sql_generation_and_diagnosis.po | 0
.../zh_CN/LC_MESSAGES/use_cases/tool_use_with_plugin.po | 0
20 files changed, 6 insertions(+)
rename docs/{localse => locales}/zh_CN/LC_MESSAGES/ecosystem.po (100%)
rename docs/{localse => locales}/zh_CN/LC_MESSAGES/getting_started/concepts.po (100%)
rename docs/{localse => locales}/zh_CN/LC_MESSAGES/getting_started/getting_started.po (100%)
rename docs/{localse => locales}/zh_CN/LC_MESSAGES/getting_started/tutorials.po (100%)
rename docs/{localse => locales}/zh_CN/LC_MESSAGES/index.po (100%)
rename docs/{localse => locales}/zh_CN/LC_MESSAGES/modules/connections.po (100%)
rename docs/{localse => locales}/zh_CN/LC_MESSAGES/modules/index.po (100%)
rename docs/{localse => locales}/zh_CN/LC_MESSAGES/modules/knownledge.po (100%)
rename docs/{localse => locales}/zh_CN/LC_MESSAGES/modules/llms.po (100%)
rename docs/{localse => locales}/zh_CN/LC_MESSAGES/modules/plugins.po (100%)
rename docs/{localse => locales}/zh_CN/LC_MESSAGES/modules/prompts.po (100%)
rename docs/{localse => locales}/zh_CN/LC_MESSAGES/modules/server.po (100%)
rename docs/{localse => locales}/zh_CN/LC_MESSAGES/reference.po (100%)
rename docs/{localse => locales}/zh_CN/LC_MESSAGES/use_cases/chatbots.po (100%)
rename docs/{localse => locales}/zh_CN/LC_MESSAGES/use_cases/interacting_with_api.po (100%)
rename docs/{localse => locales}/zh_CN/LC_MESSAGES/use_cases/knownledge_based_qa.po (100%)
rename docs/{localse => locales}/zh_CN/LC_MESSAGES/use_cases/query_database_data.po (100%)
rename docs/{localse => locales}/zh_CN/LC_MESSAGES/use_cases/sql_generation_and_diagnosis.po (100%)
rename docs/{localse => locales}/zh_CN/LC_MESSAGES/use_cases/tool_use_with_plugin.po (100%)
diff --git a/docs/conf.py b/docs/conf.py
index 9ea8cb3aa..f6d93601b 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -47,6 +47,12 @@ templates_path = ["_templates"]
exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]
+# multi language config
+language = "en"
+locals_dirs = ['./locales/']
+gettext_compact = False
+gettext_uuid = True
+
# -- Options for HTML output -------------------------------------------------
# https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output
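With this configuration in place, one way to build the Chinese docs locally is to override the language at build time; this assumes you run sphinx-build from the docs/ directory (the project may instead wrap this in a Makefile target):

```
sphinx-build -b html -D language=zh_CN . _build/html/zh_CN
```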
diff --git a/docs/localse/zh_CN/LC_MESSAGES/ecosystem.po b/docs/locales/zh_CN/LC_MESSAGES/ecosystem.po
similarity index 100%
rename from docs/localse/zh_CN/LC_MESSAGES/ecosystem.po
rename to docs/locales/zh_CN/LC_MESSAGES/ecosystem.po
diff --git a/docs/localse/zh_CN/LC_MESSAGES/getting_started/concepts.po b/docs/locales/zh_CN/LC_MESSAGES/getting_started/concepts.po
similarity index 100%
rename from docs/localse/zh_CN/LC_MESSAGES/getting_started/concepts.po
rename to docs/locales/zh_CN/LC_MESSAGES/getting_started/concepts.po
diff --git a/docs/localse/zh_CN/LC_MESSAGES/getting_started/getting_started.po b/docs/locales/zh_CN/LC_MESSAGES/getting_started/getting_started.po
similarity index 100%
rename from docs/localse/zh_CN/LC_MESSAGES/getting_started/getting_started.po
rename to docs/locales/zh_CN/LC_MESSAGES/getting_started/getting_started.po
diff --git a/docs/localse/zh_CN/LC_MESSAGES/getting_started/tutorials.po b/docs/locales/zh_CN/LC_MESSAGES/getting_started/tutorials.po
similarity index 100%
rename from docs/localse/zh_CN/LC_MESSAGES/getting_started/tutorials.po
rename to docs/locales/zh_CN/LC_MESSAGES/getting_started/tutorials.po
diff --git a/docs/localse/zh_CN/LC_MESSAGES/index.po b/docs/locales/zh_CN/LC_MESSAGES/index.po
similarity index 100%
rename from docs/localse/zh_CN/LC_MESSAGES/index.po
rename to docs/locales/zh_CN/LC_MESSAGES/index.po
diff --git a/docs/localse/zh_CN/LC_MESSAGES/modules/connections.po b/docs/locales/zh_CN/LC_MESSAGES/modules/connections.po
similarity index 100%
rename from docs/localse/zh_CN/LC_MESSAGES/modules/connections.po
rename to docs/locales/zh_CN/LC_MESSAGES/modules/connections.po
diff --git a/docs/localse/zh_CN/LC_MESSAGES/modules/index.po b/docs/locales/zh_CN/LC_MESSAGES/modules/index.po
similarity index 100%
rename from docs/localse/zh_CN/LC_MESSAGES/modules/index.po
rename to docs/locales/zh_CN/LC_MESSAGES/modules/index.po
diff --git a/docs/localse/zh_CN/LC_MESSAGES/modules/knownledge.po b/docs/locales/zh_CN/LC_MESSAGES/modules/knownledge.po
similarity index 100%
rename from docs/localse/zh_CN/LC_MESSAGES/modules/knownledge.po
rename to docs/locales/zh_CN/LC_MESSAGES/modules/knownledge.po
diff --git a/docs/localse/zh_CN/LC_MESSAGES/modules/llms.po b/docs/locales/zh_CN/LC_MESSAGES/modules/llms.po
similarity index 100%
rename from docs/localse/zh_CN/LC_MESSAGES/modules/llms.po
rename to docs/locales/zh_CN/LC_MESSAGES/modules/llms.po
diff --git a/docs/localse/zh_CN/LC_MESSAGES/modules/plugins.po b/docs/locales/zh_CN/LC_MESSAGES/modules/plugins.po
similarity index 100%
rename from docs/localse/zh_CN/LC_MESSAGES/modules/plugins.po
rename to docs/locales/zh_CN/LC_MESSAGES/modules/plugins.po
diff --git a/docs/localse/zh_CN/LC_MESSAGES/modules/prompts.po b/docs/locales/zh_CN/LC_MESSAGES/modules/prompts.po
similarity index 100%
rename from docs/localse/zh_CN/LC_MESSAGES/modules/prompts.po
rename to docs/locales/zh_CN/LC_MESSAGES/modules/prompts.po
diff --git a/docs/localse/zh_CN/LC_MESSAGES/modules/server.po b/docs/locales/zh_CN/LC_MESSAGES/modules/server.po
similarity index 100%
rename from docs/localse/zh_CN/LC_MESSAGES/modules/server.po
rename to docs/locales/zh_CN/LC_MESSAGES/modules/server.po
diff --git a/docs/localse/zh_CN/LC_MESSAGES/reference.po b/docs/locales/zh_CN/LC_MESSAGES/reference.po
similarity index 100%
rename from docs/localse/zh_CN/LC_MESSAGES/reference.po
rename to docs/locales/zh_CN/LC_MESSAGES/reference.po
diff --git a/docs/localse/zh_CN/LC_MESSAGES/use_cases/chatbots.po b/docs/locales/zh_CN/LC_MESSAGES/use_cases/chatbots.po
similarity index 100%
rename from docs/localse/zh_CN/LC_MESSAGES/use_cases/chatbots.po
rename to docs/locales/zh_CN/LC_MESSAGES/use_cases/chatbots.po
diff --git a/docs/localse/zh_CN/LC_MESSAGES/use_cases/interacting_with_api.po b/docs/locales/zh_CN/LC_MESSAGES/use_cases/interacting_with_api.po
similarity index 100%
rename from docs/localse/zh_CN/LC_MESSAGES/use_cases/interacting_with_api.po
rename to docs/locales/zh_CN/LC_MESSAGES/use_cases/interacting_with_api.po
diff --git a/docs/localse/zh_CN/LC_MESSAGES/use_cases/knownledge_based_qa.po b/docs/locales/zh_CN/LC_MESSAGES/use_cases/knownledge_based_qa.po
similarity index 100%
rename from docs/localse/zh_CN/LC_MESSAGES/use_cases/knownledge_based_qa.po
rename to docs/locales/zh_CN/LC_MESSAGES/use_cases/knownledge_based_qa.po
diff --git a/docs/localse/zh_CN/LC_MESSAGES/use_cases/query_database_data.po b/docs/locales/zh_CN/LC_MESSAGES/use_cases/query_database_data.po
similarity index 100%
rename from docs/localse/zh_CN/LC_MESSAGES/use_cases/query_database_data.po
rename to docs/locales/zh_CN/LC_MESSAGES/use_cases/query_database_data.po
diff --git a/docs/localse/zh_CN/LC_MESSAGES/use_cases/sql_generation_and_diagnosis.po b/docs/locales/zh_CN/LC_MESSAGES/use_cases/sql_generation_and_diagnosis.po
similarity index 100%
rename from docs/localse/zh_CN/LC_MESSAGES/use_cases/sql_generation_and_diagnosis.po
rename to docs/locales/zh_CN/LC_MESSAGES/use_cases/sql_generation_and_diagnosis.po
diff --git a/docs/localse/zh_CN/LC_MESSAGES/use_cases/tool_use_with_plugin.po b/docs/locales/zh_CN/LC_MESSAGES/use_cases/tool_use_with_plugin.po
similarity index 100%
rename from docs/localse/zh_CN/LC_MESSAGES/use_cases/tool_use_with_plugin.po
rename to docs/locales/zh_CN/LC_MESSAGES/use_cases/tool_use_with_plugin.po
From a9c2c7f9eafa7d1d4288d7d2ec137463374e358e Mon Sep 17 00:00:00 2001
From: csunny
Date: Sun, 11 Jun 2023 20:36:52 +0800
Subject: [PATCH 03/14] docs: fix local dir name
---
docs/conf.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/docs/conf.py b/docs/conf.py
index f6d93601b..ea3919dc5 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -49,7 +49,7 @@ exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]
# multi language config
language = "en"
-locals_dirs = ['./locales/']
+locales_dirs = ['./locales/']
gettext_compact = False
gettext_uuid = True
From 4c218cfbad8e57c41f0b53eeadef60cd2fe2641c Mon Sep 17 00:00:00 2001
From: csunny
Date: Sun, 11 Jun 2023 21:19:18 +0800
Subject: [PATCH 04/14] fix: multi language choose
---
docs/conf.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/docs/conf.py b/docs/conf.py
index ea3919dc5..7dc719214 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -48,7 +48,7 @@ exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]
# multi language config
-language = "en"
+language = "en" # ['en', 'zh_CN'] #
locales_dirs = ['./locales/']
gettext_compact = False
gettext_uuid = True
From b9c238c04c5725cd0612a77f538d956bbff5d13c Mon Sep 17 00:00:00 2001
From: zhanghy-sketchzh <1750410339@qq.com>
Date: Sun, 11 Jun 2023 23:07:38 +0800
Subject: [PATCH 05/14] fix translate problems
---
docs/getting_started/getting_started.md | 15 ++++++++++++
.../getting_started/getting_started.po | 23 +++++++++++++++++++
docs/locales/zh_CN/LC_MESSAGES/index.po | 4 ++--
3 files changed, 40 insertions(+), 2 deletions(-)
diff --git a/docs/getting_started/getting_started.md b/docs/getting_started/getting_started.md
index caf566967..b89c01fac 100644
--- a/docs/getting_started/getting_started.md
+++ b/docs/getting_started/getting_started.md
@@ -32,6 +32,21 @@ conda activate dbgpt_env
pip install -r requirements.txt
```
+Once the environment is installed, we have to create a new folder "models" in the DB-GPT project, and then we can put all the models downloaded from huggingface in this directory
+
+```
+git clone https://huggingface.co/Tribbiani/vicuna-13b
+git clone https://huggingface.co/sentence-transformers/all-MiniLM-L6-v2
+```
+
+The model files are large and will take a long time to download. During the download, let's configure the .env file, which needs to be copied and created from the .env.template
+
+```
+cp .env.template .env
+```
+
+You can configure basic parameters in the .env file, for example setting LLM_MODEL to the model to be used
+
### 3. Run
You can refer to this document to obtain the Vicuna weights: [Vicuna](https://github.com/lm-sys/FastChat/blob/main/README.md#model-weights) .
diff --git a/docs/locales/zh_CN/LC_MESSAGES/getting_started/getting_started.po b/docs/locales/zh_CN/LC_MESSAGES/getting_started/getting_started.po
index 557c0b275..bc3e499a5 100644
--- a/docs/locales/zh_CN/LC_MESSAGES/getting_started/getting_started.po
+++ b/docs/locales/zh_CN/LC_MESSAGES/getting_started/getting_started.po
@@ -118,6 +118,29 @@ msgstr "向量数据库我们默认使用的是Chroma内存数据库,所以无
"需要连接其他的同学,可以按照我们的教程进行安装配置。整个DB-GPT的"
"安装过程,我们使用的是miniconda3的虚拟环境。创建虚拟环境,并安装python依赖包"
+
+#: ../../getting_started/getting_started.md:35 0314bad0928940fc8e382d289d356c66
+msgid ""
+"Once the environment is installed, we have to create a new folder "
+"\"models\" in the DB-GPT project, and then we can put all the models "
+"downloaded from huggingface in this directory"
+msgstr "环境安装完成后,我们必须在DB-GPT项目中创建一个新文件夹\"models\","
+"然后我们可以把从huggingface下载的所有模型放到这个目录下。"
+
+#: ../../getting_started/getting_started.md:42 afdf176f72224fd6b8b6e9e23c80c1ef
+msgid ""
+"The model files are large and will take a long time to download. During "
+"the download, let's configure the .env file, which needs to be copied and"
+" created from the .env.template"
+msgstr "模型文件很大,需要很长时间才能下载。在下载过程中,让我们配置.env文件,"
+"它需要从。env.template中复制和创建。"
+
+#: ../../getting_started/getting_started.md:48 76c87610993f41059c3c0aade5117171
+msgid ""
+"You can configure basic parameters in the .env file, for example setting "
+"LLM_MODEL to the model to be used"
+msgstr "您可以在.env文件中配置基本参数,例如将LLM_MODEL设置为要使用的模型。"
+
#: ../../getting_started/getting_started.md:35 443f5f92e4cd4ce4887bae2556b605b0
msgid "3. Run"
msgstr "3. 运行"
diff --git a/docs/locales/zh_CN/LC_MESSAGES/index.po b/docs/locales/zh_CN/LC_MESSAGES/index.po
index 19559f50e..5d1b9c0d0 100644
--- a/docs/locales/zh_CN/LC_MESSAGES/index.po
+++ b/docs/locales/zh_CN/LC_MESSAGES/index.po
@@ -122,7 +122,7 @@ msgstr "开始使用DB-GPT与您的数据环境进行交互。"
#: ../../index.rst:36 87f507e0c27a4a38ba2a5c19e804549f
msgid "`Quickstart Guid <./getting_started/getting_started.html>`_"
-msgstr "`使用指南<./getting_started/getting_started.html>`_"
+msgstr "`使用指南 <./getting_started/getting_started.html>`_"
#: ../../index.rst:38 ab35a5cd96c548ecb0c285fd822f652a
msgid "Concepts and terminology"
@@ -130,7 +130,7 @@ msgstr "相关概念"
#: ../../index.rst:40 3fbd5c96df084ef889442a0b89ad6c05
msgid "`Concepts and terminology <./getting_started/concepts.html>`_"
-msgstr "相关概念 <./getting_started/concepts.html>`_"
+msgstr "`相关概念 <./getting_started/concepts.html>`_"
#: ../../index.rst:42 6d9a0d727ce14edfbdcf678c6fbba76b
msgid "Coming soon..."
From 5b156e9a54b8dbb1706b053807268d2a500a39c2 Mon Sep 17 00:00:00 2001
From: csunny
Date: Mon, 12 Jun 2023 17:27:34 +0800
Subject: [PATCH 06/14] fix: lint the docs
---
docs/conf.py | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/docs/conf.py b/docs/conf.py
index 7dc719214..618e1f62a 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -48,8 +48,8 @@ exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]
# multi language config
-language = "en" # ['en', 'zh_CN'] #
-locales_dirs = ['./locales/']
+language = "en" # ['en', 'zh_CN'] #
+locales_dirs = ["./locales/"]
gettext_compact = False
gettext_uuid = True
From 6a7c4aa5f6596069e43767d71c614bbde6b63e0e Mon Sep 17 00:00:00 2001
From: aries-ckt <916701291@qq.com>
Date: Mon, 12 Jun 2023 20:57:00 +0800
Subject: [PATCH 07/14] feature:ppt embedding
---
pilot/scene/chat_knowledge/url/chat.py | 2 +-
pilot/source_embedding/knowledge_embedding.py | 18 +++++++--
pilot/source_embedding/markdown_embedding.py | 30 ++-------------
pilot/source_embedding/pdf_embedding.py | 2 +-
pilot/source_embedding/ppt_embedding.py | 37 +++++++++++++++++++
pilot/source_embedding/source_embedding.py | 19 ++++++----
tools/knowlege_init.py | 5 +--
7 files changed, 70 insertions(+), 43 deletions(-)
create mode 100644 pilot/source_embedding/ppt_embedding.py
diff --git a/pilot/scene/chat_knowledge/url/chat.py b/pilot/scene/chat_knowledge/url/chat.py
index 88dc7ad0b..ce45602a2 100644
--- a/pilot/scene/chat_knowledge/url/chat.py
+++ b/pilot/scene/chat_knowledge/url/chat.py
@@ -38,7 +38,7 @@ class ChatUrlKnowledge(BaseChat):
)
self.url = url
vector_store_config = {
- "vector_store_name": url,
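+            # strip ":" so the URL can be used as a valid vector store name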
+ "vector_store_name": url.replace(":", ""),
"vector_store_path": KNOWLEDGE_UPLOAD_ROOT_PATH,
}
self.knowledge_embedding_client = KnowledgeEmbedding(
diff --git a/pilot/source_embedding/knowledge_embedding.py b/pilot/source_embedding/knowledge_embedding.py
index 7ec0de76c..97b515897 100644
--- a/pilot/source_embedding/knowledge_embedding.py
+++ b/pilot/source_embedding/knowledge_embedding.py
@@ -1,11 +1,13 @@
from typing import Optional
+from chromadb.errors import NotEnoughElementsException
from langchain.embeddings import HuggingFaceEmbeddings
from pilot.configs.config import Config
from pilot.source_embedding.csv_embedding import CSVEmbedding
from pilot.source_embedding.markdown_embedding import MarkdownEmbedding
from pilot.source_embedding.pdf_embedding import PDFEmbedding
+from pilot.source_embedding.ppt_embedding import PPTEmbedding
from pilot.source_embedding.url_embedding import URLEmbedding
from pilot.source_embedding.word_embedding import WordEmbedding
from pilot.vector_store.connector import VectorStoreConnector
@@ -19,6 +21,8 @@ KnowledgeEmbeddingType = {
".doc": (WordEmbedding, {}),
".docx": (WordEmbedding, {}),
".csv": (CSVEmbedding, {}),
+ ".ppt": (PPTEmbedding, {}),
+ ".pptx": (PPTEmbedding, {}),
}
@@ -42,8 +46,12 @@ class KnowledgeEmbedding:
self.knowledge_embedding_client = self.init_knowledge_embedding()
self.knowledge_embedding_client.source_embedding()
- def knowledge_embedding_batch(self):
- self.knowledge_embedding_client.batch_embedding()
+ def knowledge_embedding_batch(self, docs):
+ # docs = self.knowledge_embedding_client.read_batch()
+ self.knowledge_embedding_client.index_to_store(docs)
+
+ def read(self):
+ return self.knowledge_embedding_client.read_batch()
def init_knowledge_embedding(self):
if self.file_type == "url":
@@ -68,7 +76,11 @@ class KnowledgeEmbedding:
vector_client = VectorStoreConnector(
CFG.VECTOR_STORE_TYPE, self.vector_store_config
)
- return vector_client.similar_search(text, topk)
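+        # Chroma raises NotEnoughElementsException when topk exceeds the number
+        # of stored vectors, so fall back to a single-result search.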
+ try:
+ ans = vector_client.similar_search(text, topk)
+ except NotEnoughElementsException:
+ ans = vector_client.similar_search(text, 1)
+ return ans
def vector_exist(self):
vector_client = VectorStoreConnector(
diff --git a/pilot/source_embedding/markdown_embedding.py b/pilot/source_embedding/markdown_embedding.py
index e2851d122..5f6d9526d 100644
--- a/pilot/source_embedding/markdown_embedding.py
+++ b/pilot/source_embedding/markdown_embedding.py
@@ -5,8 +5,8 @@ from typing import List
import markdown
from bs4 import BeautifulSoup
-from langchain.document_loaders import TextLoader
from langchain.schema import Document
+from langchain.text_splitter import SpacyTextSplitter
from pilot.configs.config import Config
from pilot.source_embedding import SourceEmbedding, register
@@ -30,32 +30,8 @@ class MarkdownEmbedding(SourceEmbedding):
def read(self):
"""Load from markdown path."""
loader = EncodeTextLoader(self.file_path)
- text_splitter = CHNDocumentSplitter(
- pdf=True, sentence_size=CFG.KNOWLEDGE_CHUNK_SIZE
- )
- return loader.load_and_split(text_splitter)
-
- @register
- def read_batch(self):
- """Load from markdown path."""
- docments = []
- for root, _, files in os.walk(self.file_path, topdown=False):
- for file in files:
- filename = os.path.join(root, file)
- loader = TextLoader(filename)
- # text_splitor = CHNDocumentSplitter(chunk_size=1000, chunk_overlap=20, length_function=len)
- # docs = loader.load_and_split()
- docs = loader.load()
- # 更新metadata数据
- new_docs = []
- for doc in docs:
- doc.metadata = {
- "source": doc.metadata["source"].replace(self.file_path, "")
- }
- print("doc is embedding ... ", doc.metadata)
- new_docs.append(doc)
- docments += new_docs
- return docments
+        textsplitter = SpacyTextSplitter(
+            pipeline="zh_core_web_sm",
+            chunk_size=CFG.KNOWLEDGE_CHUNK_SIZE,
+            chunk_overlap=200,
+        )
+ return loader.load_and_split(textsplitter)
@register
def data_process(self, documents: List[Document]):
diff --git a/pilot/source_embedding/pdf_embedding.py b/pilot/source_embedding/pdf_embedding.py
index ae8dde974..66b0963d9 100644
--- a/pilot/source_embedding/pdf_embedding.py
+++ b/pilot/source_embedding/pdf_embedding.py
@@ -29,7 +29,7 @@ class PDFEmbedding(SourceEmbedding):
# pdf=True, sentence_size=CFG.KNOWLEDGE_CHUNK_SIZE
# )
textsplitter = SpacyTextSplitter(
- pipeline="zh_core_web_sm", chunk_size=1000, chunk_overlap=200
+ pipeline="zh_core_web_sm", chunk_size=CFG.KNOWLEDGE_CHUNK_SIZE, chunk_overlap=200
)
return loader.load_and_split(textsplitter)
diff --git a/pilot/source_embedding/ppt_embedding.py b/pilot/source_embedding/ppt_embedding.py
new file mode 100644
index 000000000..869e92395
--- /dev/null
+++ b/pilot/source_embedding/ppt_embedding.py
@@ -0,0 +1,37 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+from typing import List
+
+from langchain.document_loaders import UnstructuredPowerPointLoader
+from langchain.schema import Document
+from langchain.text_splitter import SpacyTextSplitter
+
+from pilot.configs.config import Config
+from pilot.source_embedding import SourceEmbedding, register
+
+CFG = Config()
+
+
+class PPTEmbedding(SourceEmbedding):
+    """PPT embedding for reading ppt documents."""
+
+ def __init__(self, file_path, vector_store_config):
+        """Initialize with ppt path."""
+ super().__init__(file_path, vector_store_config)
+ self.file_path = file_path
+ self.vector_store_config = vector_store_config
+
+ @register
+ def read(self):
+ """Load from ppt path."""
+ loader = UnstructuredPowerPointLoader(self.file_path)
+        textsplitter = SpacyTextSplitter(
+            pipeline="zh_core_web_sm",
+            chunk_size=CFG.KNOWLEDGE_CHUNK_SIZE,
+            chunk_overlap=200,
+        )
+ return loader.load_and_split(textsplitter)
+
+ @register
+ def data_process(self, documents: List[Document]):
+ i = 0
+ for d in documents:
+ documents[i].page_content = d.page_content.replace("\n", "")
+ i += 1
+ return documents
diff --git a/pilot/source_embedding/source_embedding.py b/pilot/source_embedding/source_embedding.py
index 50c7044f9..3d881fcdf 100644
--- a/pilot/source_embedding/source_embedding.py
+++ b/pilot/source_embedding/source_embedding.py
@@ -2,6 +2,8 @@
# -*- coding: utf-8 -*-
from abc import ABC, abstractmethod
from typing import Dict, List, Optional
+
+from chromadb.errors import NotEnoughElementsException
from pilot.configs.config import Config
from pilot.vector_store.connector import VectorStoreConnector
@@ -62,7 +64,11 @@ class SourceEmbedding(ABC):
@register
def similar_search(self, doc, topk):
"""vector store similarity_search"""
- return self.vector_client.similar_search(doc, topk)
+ try:
+ ans = self.vector_client.similar_search(doc, topk)
+ except NotEnoughElementsException:
+ ans = self.vector_client.similar_search(doc, 1)
+ return ans
def vector_name_exist(self):
return self.vector_client.vector_name_exists()
@@ -79,14 +85,11 @@ class SourceEmbedding(ABC):
if "index_to_store" in registered_methods:
self.index_to_store(text)
- def batch_embedding(self):
- if "read_batch" in registered_methods:
- text = self.read_batch()
+ def read_batch(self):
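+        # Only read, post-process and split the documents here; the caller
+        # indexes them into the vector store via index_to_store().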
+ if "read" in registered_methods:
+ text = self.read()
if "data_process" in registered_methods:
text = self.data_process(text)
if "text_split" in registered_methods:
self.text_split(text)
- if "text_to_vector" in registered_methods:
- self.text_to_vector(text)
- if "index_to_store" in registered_methods:
- self.index_to_store(text)
+ return text
diff --git a/tools/knowlege_init.py b/tools/knowlege_init.py
index ff13865b4..26338df1c 100644
--- a/tools/knowlege_init.py
+++ b/tools/knowlege_init.py
@@ -23,7 +23,7 @@ class LocalKnowledgeInit:
self.vector_store_config = vector_store_config
self.model_name = LLM_MODEL_CONFIG["text2vec"]
- def knowledge_persist(self, file_path, append_mode):
+ def knowledge_persist(self, file_path):
"""knowledge persist"""
for root, _, files in os.walk(file_path, topdown=False):
for file in files:
@@ -41,7 +41,6 @@ class LocalKnowledgeInit:
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument("--vector_name", type=str, default="default")
- parser.add_argument("--append", type=bool, default=False)
args = parser.parse_args()
vector_name = args.vector_name
append_mode = args.append
@@ -49,5 +48,5 @@ if __name__ == "__main__":
vector_store_config = {"vector_store_name": vector_name}
print(vector_store_config)
kv = LocalKnowledgeInit(vector_store_config=vector_store_config)
- kv.knowledge_persist(file_path=DATASETS_DIR, append_mode=append_mode)
+ kv.knowledge_persist(file_path=DATASETS_DIR)
print("your knowledge embedding success...")
From cbe769eb92836a7c15051f5fdd48f783c5585393 Mon Sep 17 00:00:00 2001
From: highroom <827148@qq.com>
Date: Mon, 12 Jun 2023 22:59:46 +0800
Subject: [PATCH 08/14] Update run.sh
fix variable expansion error
---
run.sh | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/run.sh b/run.sh
index 81d3ec22b..5fc1e3bde 100644
--- a/run.sh
+++ b/run.sh
@@ -15,11 +15,11 @@ function find_python_command() {
PYTHONCMD=$(find_python_command)
-nohup PYTHONCMD pilot/server/llmserver.py >> /root/server.log 2>&1 &
+nohup $PYTHONCMD pilot/server/llmserver.py >> /root/server.log 2>&1 &
while [ `grep -c "Uvicorn running on" /root/server.log` -eq '0' ];do
sleep 1s;
echo "wait server running"
done
echo "server running"
-PYTHONCMD pilot/server/webserver.py
+$PYTHONCMD pilot/server/webserver.py
From 7124c34017f92e275aef7ee8c913e52ce96d129b Mon Sep 17 00:00:00 2001
From: aries-ckt <916701291@qq.com>
Date: Tue, 13 Jun 2023 10:56:58 +0800
Subject: [PATCH 09/14] update:doc
---
docs/getting_started/tutorials.md | 12 +++-
docs/modules/knownledge.md | 1 -
docs/modules/llms.md | 80 ++++++++++++++++++++++++++-
docs/use_cases/knownledge_based_qa.md | 6 +-
4 files changed, 93 insertions(+), 6 deletions(-)
diff --git a/docs/getting_started/tutorials.md b/docs/getting_started/tutorials.md
index 9583cda90..99ac15a51 100644
--- a/docs/getting_started/tutorials.md
+++ b/docs/getting_started/tutorials.md
@@ -3,4 +3,14 @@
This is a collection of DB-GPT tutorials on Medium.
-Comming soon...
\ No newline at end of file
+### Introduce
+[What is DB-GPT](https://www.youtube.com/watch?v=QszhVJerc0I) by csunny (https://github.com/csunny/DB-GPT)
+
+### Knowledge
+
+[How to Create your own knowledge repository](https://db-gpt.readthedocs.io/en/latest/modules/knownledge.html)
+
+[Add new Knowledge demonstration](../../assets/new_knownledge_en.gif)
+
+### DB Plugins
+[db plugins demonstration](../../assets/auto_sql_en.gif)
\ No newline at end of file
diff --git a/docs/modules/knownledge.md b/docs/modules/knownledge.md
index 32a22acf8..5f168cef4 100644
--- a/docs/modules/knownledge.md
+++ b/docs/modules/knownledge.md
@@ -19,7 +19,6 @@ As the knowledge base is currently the most significant user demand scenario, we
python tools/knowledge_init.py
--vector_name : your vector store name default_value:default
---append: append mode, True:append, False: not append default_value:False
```
diff --git a/docs/modules/llms.md b/docs/modules/llms.md
index b4d57579f..c83b73af8 100644
--- a/docs/modules/llms.md
+++ b/docs/modules/llms.md
@@ -8,4 +8,82 @@ To use multiple models, modify the LLM_MODEL parameter in the .env configuration
Notice: you can create .env file from .env.template, just use command like this:
```
cp .env.template .env
-```
\ No newline at end of file
+LLM_MODEL=vicuna-13b
+MODEL_SERVER=http://127.0.0.1:8000
+```
+Now we support the models vicuna-13b, vicuna-7b, chatglm-6b, flan-t5-base, guanaco-33b-merged, falcon-40b and gorilla-7b.
+
+DB-GPT provides a model load adapter and a chat adapter. The load adapter lets you load different LLM models by inheriting BaseLLMAdaper: you just implement the match() and loader() methods.
+
+vicuna llm load adapter
+
+```
+class VicunaLLMAdapater(BaseLLMAdaper):
+ """Vicuna Adapter"""
+
+ def match(self, model_path: str):
+ return "vicuna" in model_path
+
+ def loader(self, model_path: str, from_pretrained_kwagrs: dict):
+ tokenizer = AutoTokenizer.from_pretrained(model_path, use_fast=False)
+ model = AutoModelForCausalLM.from_pretrained(
+ model_path, low_cpu_mem_usage=True, **from_pretrained_kwagrs
+ )
+ return model, tokenizer
+```
+
+chatglm load adapter
+```
+
+class ChatGLMAdapater(BaseLLMAdaper):
+ """LLM Adatpter for THUDM/chatglm-6b"""
+
+ def match(self, model_path: str):
+ return "chatglm" in model_path
+
+ def loader(self, model_path: str, from_pretrained_kwargs: dict):
+ tokenizer = AutoTokenizer.from_pretrained(model_path, trust_remote_code=True)
+
+ if DEVICE != "cuda":
+ model = AutoModel.from_pretrained(
+ model_path, trust_remote_code=True, **from_pretrained_kwargs
+ ).float()
+ return model, tokenizer
+ else:
+ model = (
+ AutoModel.from_pretrained(
+ model_path, trust_remote_code=True, **from_pretrained_kwargs
+ )
+ .half()
+ .cuda()
+ )
+ return model, tokenizer
+```
+The chat adapter lets you chat with different LLM models by inheriting BaseChatAdpter: you just implement the match() and get_generate_stream_func() methods.
+
+vicuna llm chat adapter
+```
+class VicunaChatAdapter(BaseChatAdpter):
+ """Model chat Adapter for vicuna"""
+
+ def match(self, model_path: str):
+ return "vicuna" in model_path
+
+ def get_generate_stream_func(self):
+ return generate_stream
+```
+
+chatglm llm chat adapter
+```
+class ChatGLMChatAdapter(BaseChatAdpter):
+ """Model chat Adapter for ChatGLM"""
+
+ def match(self, model_path: str):
+ return "chatglm" in model_path
+
+ def get_generate_stream_func(self):
+ from pilot.model.llm_out.chatglm_llm import chatglm_generate_stream
+
+ return chatglm_generate_stream
+```
+If you want to integrate your own model, you just need to inherit BaseLLMAdaper and BaseChatAdpter and implement the methods above, as sketched below.
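+
+As a minimal sketch, a custom adapter pair might look like the following (the "my-model" name and the my_generate_stream function are placeholders for your own model and streaming generate function):
+
+```
+from transformers import AutoModelForCausalLM, AutoTokenizer
+
+
+class MyLLMAdaper(BaseLLMAdaper):
+    """Load adapter for a hypothetical my-model"""
+
+    def match(self, model_path: str):
+        return "my-model" in model_path
+
+    def loader(self, model_path: str, from_pretrained_kwargs: dict):
+        tokenizer = AutoTokenizer.from_pretrained(model_path, use_fast=False)
+        model = AutoModelForCausalLM.from_pretrained(
+            model_path, low_cpu_mem_usage=True, **from_pretrained_kwargs
+        )
+        return model, tokenizer
+
+
+class MyChatAdapter(BaseChatAdpter):
+    """Chat adapter for a hypothetical my-model"""
+
+    def match(self, model_path: str):
+        return "my-model" in model_path
+
+    def get_generate_stream_func(self):
+        # return your own streaming generate function here
+        return my_generate_stream
+```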
\ No newline at end of file
diff --git a/docs/use_cases/knownledge_based_qa.md b/docs/use_cases/knownledge_based_qa.md
index dfd0d345d..3a357aaad 100644
--- a/docs/use_cases/knownledge_based_qa.md
+++ b/docs/use_cases/knownledge_based_qa.md
@@ -3,7 +3,7 @@
Chat with your own knowledge is a very interesting thing. In the usage scenarios of this chapter, we will introduce how to build your own knowledge base through the knowledge base API. Firstly, building a knowledge store can currently be initialized by executing "python tool/knowledge_init.py" to initialize the content of your own knowledge base, which was introduced in the previous knowledge base module. Of course, you can also call our provided knowledge embedding API to store knowledge.
-We currently support four document formats: txt, pdf, url, and md.
+We currently support many document formats: txt, pdf, md, html, doc, ppt, and url.
```
vector_store_config = {
"vector_store_name": name
@@ -11,7 +11,7 @@ vector_store_config = {
file_path = "your file path"
-knowledge_embedding_client = KnowledgeEmbedding(file_path=file_path, model_name=LLM_MODEL_CONFIG["text2vec"],local_persist=False, vector_store_config=vector_store_config)
+knowledge_embedding_client = KnowledgeEmbedding(file_path=file_path, model_name=LLM_MODEL_CONFIG["text2vec"], vector_store_config=vector_store_config)
knowledge_embedding_client.knowledge_embedding()
@@ -37,7 +37,7 @@ vector_store_config = {
query = "your query"
-knowledge_embedding_client = KnowledgeEmbedding(file_path="", model_name=LLM_MODEL_CONFIG["text2vec"], local_persist=False, vector_store_config=vector_store_config)
+knowledge_embedding_client = KnowledgeEmbedding(file_path="", model_name=LLM_MODEL_CONFIG["text2vec"], vector_store_config=vector_store_config)
knowledge_embedding_client.similar_search(query, 10)
```
\ No newline at end of file
From 555ae71ab50e96d514da104936af9d374c31f928 Mon Sep 17 00:00:00 2001
From: "magic.chen"
Date: Tue, 13 Jun 2023 14:41:54 +0800
Subject: [PATCH 10/14] Update README.md
---
README.md | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/README.md b/README.md
index 129814fae..8aa7ca882 100644
--- a/README.md
+++ b/README.md
@@ -10,7 +10,7 @@
-[**简体中文**](README.zh.md)|[**Discord**](https://discord.gg/ea6BnZkY)
+[**简体中文**](README.zh.md)|[**Discord**](https://discord.gg/xfNDzZ9t)
## What is DB-GPT?
@@ -46,7 +46,7 @@ Currently, we have released multiple key features, which are listed below to dem
- Support for unstructured data such as PDF, Markdown, CSV, and WebURL
- Multi LLMs Support
- - Supports multiple large language models, currently supporting Vicuna (7b, 13b), ChatGLM-6b (int4, int8), guanaco(7b,13b,33b)
+ - Supports multiple large language models, currently supporting Vicuna (7b, 13b), ChatGLM-6b (int4, int8), guanaco(7b,13b,33b), Gorilla(7b,13b)
- TODO: codegen2, codet5p
From 904d32655e5a5ba95c3ef306d506111629e92202 Mon Sep 17 00:00:00 2001
From: aries-ckt <916701291@qq.com>
Date: Tue, 13 Jun 2023 16:04:44 +0800
Subject: [PATCH 11/14] update:doc
---
README.md | 2 +
README.zh.md | 4 ++
.../LC_MESSAGES/getting_started/tutorials.po | 40 +++++++++---
.../locales/zh_CN/LC_MESSAGES/modules/llms.po | 61 ++++++++++++++++---
.../use_cases/knownledge_based_qa.po | 31 +++++-----
docs/modules/knownledge.md | 3 +
6 files changed, 109 insertions(+), 32 deletions(-)
diff --git a/README.md b/README.md
index 8aa7ca882..e6873579f 100644
--- a/README.md
+++ b/README.md
@@ -179,6 +179,8 @@ In the .env configuration file, modify the LANGUAGE parameter to switch between
1.Place personal knowledge files or folders in the pilot/datasets directory.
+We currently support many document formats: txt, pdf, md, html, doc, ppt, and url.
+
2.set .env configuration set your vector store type, eg:VECTOR_STORE_TYPE=Chroma, now we support Chroma and Milvus(version > 2.1)
3.Run the knowledge repository script in the tools directory.
diff --git a/README.zh.md b/README.zh.md
index 0c25c1d9d..2255946b5 100644
--- a/README.zh.md
+++ b/README.zh.md
@@ -18,6 +18,8 @@
DB-GPT 是一个开源的以数据库为基础的GPT实验项目,使用本地化的GPT大模型与您的数据和环境进行交互,无数据泄露风险,100% 私密,100% 安全。
+[DB-GPT视频介绍](https://www.bilibili.com/video/BV1SM4y1a7Nj/?buvid=551b023900b290f9497610b2155a2668&is_story_h5=false&mid=%2BVyE%2Fwau5woPcUKieCWS0A%3D%3D&p=1&plat_id=116&share_from=ugc&share_medium=iphone&share_plat=ios&share_session_id=5D08B533-82A4-4D40-9615-7826065B4574&share_source=GENERIC&share_tag=s_i&timestamp=1686307943&unique_k=bhO3lgQ&up_id=31375446)
+
## 最新发布
- [2023/06/01]🔥 在Vicuna-13B基础模型的基础上,通过插件实现任务链调用。例如单句创建数据库的实现.[演示](./assets/dbgpt_bytebase_plugin.gif)
@@ -174,6 +176,8 @@ $ python webserver.py
1.将个人知识文件或者文件夹放入pilot/datasets目录中
+当前支持的文档格式: txt, pdf, md, html, doc, ppt, and url.
+
2.在.env文件指定你的向量数据库类型,VECTOR_STORE_TYPE(默认Chroma),目前支持Chroma,Milvus(需要设置MILVUS_URL和MILVUS_PORT)
注意Milvus版本需要>2.1
diff --git a/docs/locales/zh_CN/LC_MESSAGES/getting_started/tutorials.po b/docs/locales/zh_CN/LC_MESSAGES/getting_started/tutorials.po
index eef93efd5..989630ab1 100644
--- a/docs/locales/zh_CN/LC_MESSAGES/getting_started/tutorials.po
+++ b/docs/locales/zh_CN/LC_MESSAGES/getting_started/tutorials.po
@@ -8,7 +8,7 @@ msgid ""
msgstr ""
"Project-Id-Version: DB-GPT 0.1.0\n"
"Report-Msgid-Bugs-To: \n"
-"POT-Creation-Date: 2023-06-11 14:10+0800\n"
+"POT-Creation-Date: 2023-06-13 11:38+0800\n"
"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
"Last-Translator: FULL NAME \n"
"Language: zh_CN\n"
@@ -17,17 +17,43 @@ msgstr ""
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=utf-8\n"
"Content-Transfer-Encoding: 8bit\n"
-"Generated-By: Babel 2.11.0\n"
+"Generated-By: Babel 2.12.1\n"
-#: ../../getting_started/tutorials.md:1 12b03941d64f4bdf96eaaeec0147a387
+#: ../../getting_started/tutorials.md:1 7011a2ab0e7f45ddb1fa85b6479cc442
msgid "Tutorials"
msgstr "教程"
-#: ../../getting_started/tutorials.md:4 b966c15b01f94a1e84d4b6142b8f4111
+#: ../../getting_started/tutorials.md:4 960f88b9c1b64940bfa0576bab5b0314
msgid "This is a collection of DB-GPT tutorials on Medium."
msgstr "这是知乎上DB-GPT教程的集合。."
-#: ../../getting_started/tutorials.md:6 869431aac3864180acb41b852d48d29e
-msgid "Comming soon..."
-msgstr "未完待续"
+#: ../../getting_started/tutorials.md:6 3915395cc45742519bf0c607eeafc489
+msgid ""
+"###Introduce [What is DB-"
+"GPT](https://www.youtube.com/watch?v=QszhVJerc0I) by csunny "
+"(https://github.com/csunny/DB-GPT)"
+msgstr "###Introduce [什么是DB-GPT](https://www.bilibili.com/video/BV1SM4y1a7Nj/?buvid=551b023900b290f9497610b2155a2668&is_story_h5=false&mid=%2BVyE%2Fwau5woPcUKieCWS0A%3D%3D&p=1&plat_id=116&share_from=ugc&share_medium=iphone&share_plat=ios&share_session_id=5D08B533-82A4-4D40-9615-7826065B4574&share_source=GENERIC&share_tag=s_i×tamp=1686307943&unique_k=bhO3lgQ&up_id=31375446) by csunny (https://github.com/csunny/DB-GPT)"
+
+#: ../../getting_started/tutorials.md:9 e213736923574b2cb039a457d789c27c
+msgid "Knowledge"
+msgstr "知识库"
+
+#: ../../getting_started/tutorials.md:11 90b5472735a644168d51c054ed882748
+msgid ""
+"[How to Create your own knowledge repository](https://db-"
+"gpt.readthedocs.io/en/latest/modules/knownledge.html)"
+msgstr "[怎么创建自己的知识库](https://db-"
+"gpt.readthedocs.io/en/latest/modules/knownledge.html)"
+
+#: ../../getting_started/tutorials.md:13 6a851e1e88ea4bcbaf7ee742a12224ef
+msgid "[Add new Knowledge demonstration](../../assets/new_knownledge_en.gif)"
+msgstr "[新增知识库演示](../../assets/new_knownledge_en.gif)"
+
+#: ../../getting_started/tutorials.md:15 4487ef393e004e7c936f5104727212a4
+msgid "DB Plugins"
+msgstr "DB Plugins"
+
+#: ../../getting_started/tutorials.md:16 ee5decd8441d40ae8a240a19c1a5a74a
+msgid "[db plugins demonstration](../../assets/auto_sql_en.gif)"
+msgstr "[db plugins 演示](../../assets/auto_sql_en.gif)"
diff --git a/docs/locales/zh_CN/LC_MESSAGES/modules/llms.po b/docs/locales/zh_CN/LC_MESSAGES/modules/llms.po
index 08fd5d984..bbb05b046 100644
--- a/docs/locales/zh_CN/LC_MESSAGES/modules/llms.po
+++ b/docs/locales/zh_CN/LC_MESSAGES/modules/llms.po
@@ -8,7 +8,7 @@ msgid ""
msgstr ""
"Project-Id-Version: DB-GPT 0.1.0\n"
"Report-Msgid-Bugs-To: \n"
-"POT-Creation-Date: 2023-06-11 14:10+0800\n"
+"POT-Creation-Date: 2023-06-13 11:38+0800\n"
"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
"Last-Translator: FULL NAME \n"
"Language: zh_CN\n"
@@ -17,13 +17,13 @@ msgstr ""
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=utf-8\n"
"Content-Transfer-Encoding: 8bit\n"
-"Generated-By: Babel 2.11.0\n"
+"Generated-By: Babel 2.12.1\n"
-#: ../../modules/llms.md:1 9c05a511436b4a408e2d1acd2f2568e7
+#: ../../modules/llms.md:1 34386f3fecba48fbbd86718283ba593c
msgid "LLMs"
msgstr "大语言模型"
-#: ../../modules/llms.md:3 c6549cbde17e42e596470a537286cedb
+#: ../../modules/llms.md:3 241b39ad980f4cfd90a7f0fdae05a1d2
#, python-format
msgid ""
"In the underlying large model integration, we have designed an open "
@@ -34,23 +34,64 @@ msgid ""
" of 85% or higher. We use higher standards to select models, hoping to "
"save users the cumbersome testing and evaluation process in the process "
"of use."
-msgstr "在底层大模型接入中,我们设计了开放的接口,支持对接多种大模型。同时对于接入模型的效果,"
-"我们有非常严格的把控与评审机制。对大模型能力上与ChatGPT对比,在准确率上需要满足85%"
-"以上的能力对齐。我们用更高的标准筛选模型,是期望在用户使用过程中,可以省去前面繁琐的测试评估环节。"
+msgstr "在底层大模型接入中,我们设计了开放的接口,支持对接多种大模型。同时对于接入模型的效果,我们有非常严格的把控与评审机制。对大模型能力上与ChatGPT对比,在准确率上需要满足85%以上的能力对齐。我们用更高的标准筛选模型,是期望在用户使用过程中,可以省去前面繁琐的测试评估环节。"
-#: ../../modules/llms.md:5 1b18ef91924442f7ab7a117aec6122d5
+#: ../../modules/llms.md:5 25175e87a62e41bca86798eb783cefd6
msgid "Multi LLMs Usage"
msgstr "多模型使用"
-#: ../../modules/llms.md:6 b14256f1768d45ef929be664b8afb31e
+#: ../../modules/llms.md:6 8c35341e9ca94202ba779567813f9973
msgid ""
"To use multiple models, modify the LLM_MODEL parameter in the .env "
"configuration file to switch between the models."
msgstr "如果要使用不同的模型,请修改.env配置文件中的LLM MODEL参数以在模型之间切换。"
-#: ../../modules/llms.md:8 42cbe90a1a524d8381a0a743ef1a927e
+#: ../../modules/llms.md:8 2edf3309a6554f39ad74e19faff09cee
msgid ""
"Notice: you can create .env file from .env.template, just use command "
"like this:"
msgstr "注意:你可以从 .env.template 创建 .env 文件。只需使用如下命令:"
+#: ../../modules/llms.md:14 5fa7639ef294425e89e13b7c6617fb4b
+msgid ""
+"now we support models vicuna-13b, vicuna-7b, chatglm-6b, flan-t5-base, "
+"guanaco-33b-merged, falcon-40b, gorilla-7b."
+msgstr "现在我们支持的模型有vicuna-13b, vicuna-7b, chatglm-6b, flan-t5-base, "
+"guanaco-33b-merged, falcon-40b, gorilla-7b."
+
+#: ../../modules/llms.md:16 96c9a5ad00264bd2a07bdbdec87e471e
+msgid ""
+"DB-GPT provides a model load adapter and chat adapter. load adapter which"
+" allows you to easily adapt load different LLM models by inheriting the "
+"BaseLLMAdapter. You just implement match() and loader() method."
+msgstr "DB-GPT提供了多模型适配器load adapter和chat adapter.load adapter通过继承BaseLLMAdapter类, 实现match和loader方法允许你适配不同的LLM."
+
+#: ../../modules/llms.md:18 1033714691464f50900c04c9e1bb5643
+msgid "vicuna llm load adapter"
+msgstr "vicuna llm load adapter"
+
+#: ../../modules/llms.md:35 faa6432575be45bcae5deb1cc7fee3fb
+msgid "chatglm load adapter"
+msgstr "chatglm load adapter"
+
+#: ../../modules/llms.md:62 61c4189cabf04e628132c2bf5f02bb50
+msgid ""
+"chat adapter which allows you to easily adapt chat different LLM models "
+"by inheriting the BaseChatAdpter.you just implement match() and "
+"get_generate_stream_func() method"
+msgstr "chat adapter通过继承BaseChatAdpter允许你通过实现match和get_generate_stream_func方法允许你适配不同的LLM."
+
+#: ../../modules/llms.md:64 407a67e4e2c6414b9cde346961d850c0
+msgid "vicuna llm chat adapter"
+msgstr "vicuna llm chat adapter"
+
+#: ../../modules/llms.md:76 53a55238cd90406db58c50dc64465195
+msgid "chatglm llm chat adapter"
+msgstr "chatglm llm chat adapter"
+
+#: ../../modules/llms.md:89 b0c5ff72c05e40b3b301d6b81205fe63
+msgid ""
+"if you want to integrate your own model, just need to inheriting "
+"BaseLLMAdaper and BaseChatAdpter and implement the methods"
+msgstr "如果你想集成自己的模型,只需要继承BaseLLMAdaper和BaseChatAdpter类,然后实现里面的方法即可"
+
diff --git a/docs/locales/zh_CN/LC_MESSAGES/use_cases/knownledge_based_qa.po b/docs/locales/zh_CN/LC_MESSAGES/use_cases/knownledge_based_qa.po
index 26473de7f..00acd9ff2 100644
--- a/docs/locales/zh_CN/LC_MESSAGES/use_cases/knownledge_based_qa.po
+++ b/docs/locales/zh_CN/LC_MESSAGES/use_cases/knownledge_based_qa.po
@@ -8,7 +8,7 @@ msgid ""
msgstr ""
"Project-Id-Version: DB-GPT 0.1.0\n"
"Report-Msgid-Bugs-To: \n"
-"POT-Creation-Date: 2023-06-11 14:10+0800\n"
+"POT-Creation-Date: 2023-06-13 11:38+0800\n"
"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
"Last-Translator: FULL NAME \n"
"Language: zh_CN\n"
@@ -17,13 +17,13 @@ msgstr ""
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=utf-8\n"
"Content-Transfer-Encoding: 8bit\n"
-"Generated-By: Babel 2.11.0\n"
+"Generated-By: Babel 2.12.1\n"
-#: ../../use_cases/knownledge_based_qa.md:1 a03c7a5aa5cc4a3e9bc7bd3734d47176
+#: ../../use_cases/knownledge_based_qa.md:1 ddfe412b92e14324bdc11ffe58114e5f
msgid "Knownledge based qa"
msgstr "知识问答"
-#: ../../use_cases/knownledge_based_qa.md:3 37607733852c4ade97c80fbcca66d573
+#: ../../use_cases/knownledge_based_qa.md:3 48635316cc704a779089ff7b5cb9a836
msgid ""
"Chat with your own knowledge is a very interesting thing. In the usage "
"scenarios of this chapter, we will introduce how to build your own "
@@ -33,25 +33,26 @@ msgid ""
"base, which was introduced in the previous knowledge base module. Of "
"course, you can also call our provided knowledge embedding API to store "
"knowledge."
-msgstr "用自己的知识聊天是一件很有趣的事情。在本章的使用场景中,"
-"我们将介绍如何通过知识库API构建自己的知识库。首先,"
-"构建知识存储目前可以通过执行“python tool/knowledge_init.py”"
-"来初始化您自己的知识库的内容,这在前面的知识库模块中已经介绍过了"
-"。当然,你也可以调用我们提供的知识嵌入API来存储知识。"
+msgstr ""
+"用自己的知识聊天是一件很有趣的事情。在本章的使用场景中,我们将介绍如何通过知识库API构建自己的知识库。首先,构建知识存储目前可以通过执行“python"
+" "
+"tool/knowledge_init.py”来初始化您自己的知识库的内容,这在前面的知识库模块中已经介绍过了。当然,你也可以调用我们提供的知识嵌入API来存储知识。"
-#: ../../use_cases/knownledge_based_qa.md:6 ea5ad6cec29d49228c03d57d255c42fe
-msgid "We currently support four document formats: txt, pdf, url, and md."
+#: ../../use_cases/knownledge_based_qa.md:6 0a5c68429c9343cf8b88f4f1dddb18eb
+#, fuzzy
+msgid ""
+"We currently support many document formats: txt, pdf, md, html, doc, ppt,"
+" and url."
msgstr "“我们目前支持四种文件格式: txt, pdf, url, 和md。"
-#: ../../use_cases/knownledge_based_qa.md:20 01908d4b18b345908004a251462d42b3
+#: ../../use_cases/knownledge_based_qa.md:20 83f3544c06954e5cbc0cc7788f699eb1
msgid ""
"Now we currently support vector databases: Chroma (default) and Milvus. "
"You can switch between them by modifying the \"VECTOR_STORE_TYPE\" field "
"in the .env file."
-msgstr "“我们目前支持向量数据库:Chroma(默认)和Milvus。"
-"你可以通过修改.env文件中的“VECTOR_STORE_TYPE”参数在它们之间切换。"
+msgstr "“我们目前支持向量数据库:Chroma(默认)和Milvus。你可以通过修改.env文件中的“VECTOR_STORE_TYPE”参数在它们之间切换。"
-#: ../../use_cases/knownledge_based_qa.md:31 f37d80faa3f84c8cb176a59f4ff8140c
+#: ../../use_cases/knownledge_based_qa.md:31 ac12f26b81384fc4bf44ccce1c0d86b4
msgid "Below is an example of using the knowledge base API to query knowledge:"
msgstr "下面是一个使用知识库API进行查询的例子:"
diff --git a/docs/modules/knownledge.md b/docs/modules/knownledge.md
index 5f168cef4..8cf9cd4c3 100644
--- a/docs/modules/knownledge.md
+++ b/docs/modules/knownledge.md
@@ -10,6 +10,9 @@ As the knowledge base is currently the most significant user demand scenario, we
1.Place personal knowledge files or folders in the pilot/datasets directory.
+We currently support many document formats: txt, pdf, md, html, doc, ppt, and url.
+
+
2.Update your .env, set your vector store type, VECTOR_STORE_TYPE=Chroma
(now only support Chroma and Milvus, if you set Milvus, please set MILVUS_URL and MILVUS_PORT)
From 6101069cfe8d5247cbd792d16ea8ae6ed2a037c0 Mon Sep 17 00:00:00 2001
From: csunny
Date: Tue, 13 Jun 2023 16:48:30 +0800
Subject: [PATCH 12/14] fix: readme demo replace
---
assets/auto_sql_en.gif | Bin 2322033 -> 5284793 bytes
assets/knownledge_qa_en.jpg | Bin 0 -> 379645 bytes
assets/new_knownledge_en.gif | Bin 2579401 -> 0 bytes
assets/wechat.jpg | Bin 216029 -> 261514 bytes
requirements.txt | 2 +-
5 files changed, 1 insertion(+), 1 deletion(-)
create mode 100644 assets/knownledge_qa_en.jpg
delete mode 100644 assets/new_knownledge_en.gif
diff --git a/assets/auto_sql_en.gif b/assets/auto_sql_en.gif
index 78b515d62df49e02123333f736de5357ce63dfd0..8c4686a493937af5f72b6bcf09eeaf0acb310735 100644
GIT binary patch
literal 5284793
zGe=^p8MIw4u?_)k>V~#$Lc8#idZwUb7tnF4q)FkV2}BaIJ860PLRZX^
zf5k&LIg+;HlXf?g_NkH&9h0~GlTWo%&byP(@=_4mu2-HI{Lc^k?{}jOPg%LNysr^E1SIG6XL(WT-Q-oV0G3EX9p^Wot6!r!#*Y
zWy;}aDRO2hQ)g+2WNGSTY1L%uOlQ5j%+kis*5k}Jpw9j*lKrO%U6cKFI@{zj+YCR)
zoHNIgI>$~V$6hDLp(e*^I>+WR#}Pl*g)`TUI@ebu*H0%GQj=?dZ%=P!)AK-wgCZ~L
zSzZj9{L{&cm(PQu;lIngq?){B>ikrZ{A|wrT%G(p`TRns{QS%OBDDL5Zb*w1RB;x-
zbqWyj1$9mZHJ1hTX!no0ut}t_gR`(pr!ee+ci>cE&}QKf+WJd~8efl{_!>Jo9gD0f
znw~Cl>@Hf^LKS%m7kg+IgB^`2HA|Pj7Q0WCPEi*#
zC=_p8mNIRZF6#V&|2P75Aip_&*<}%CWIn0oQxhpCY~?#*#^@`FOv
z$E(UOx>cs%s?2MvEDNfv6059ds(vU`|3vd}y49lFaVaD0Vi>7{1aN{!@M;-&sAIVw
z4M%`8iYKTR9x?+DyMjlc>(Qc!7+pl1Ga?}o0j)(O&mdB-5b5Z8v}jGXZcVOpO@3lc
zVQo$EObzU+ri`YxLbNvh8!i!!kR-&R9wTZaqN4YhR_E8hkljZJg6B%Li
zJ(uPOV$I~xX3Dx|`od<0**{%o=IdrAmlhUuMjG0}j_yYnw(!liJn3r@xJI?`yR<$-
zXQZL6LUpYYg{@MvtKQ0ORS+s
z_rnt2CNL>;R+KY!$clFSH14>(S79pF@l~(G5*?6M?D*l*@twQVPOvn)BC#9%e>2nw#&b+BOs_NWVS1;uPZ{Y(~q`0s;(>cx+@aenE>sM>FZ7*
z>`LVBNfzsgHt&fe?8%+&&e!WH-gyL(dMv&SkSqdK=DpynH`kb;`+QPeOzhYP(ncUz
z{HESEPgVil+8*=T-r2ss>%IZnKk?`hz5X$m{t0M5vaWx6wtuz|g$U&1#|H!SG2D!w
z4arnBHh)Ryd$QtL0h%_iaLGYotAGYI+xd3mP2%A7b>mIl;4SSCAU<@dH-s`D!U!J1
zavdT}8X~G6BBmQA6(7deA12wY3p7eSP-Jhr2jJAaqQ{ZN83Kl02r=jh733Ke(H|9>8x>6&6}uUgpc{KhH1=A2>K`GdVx5jz6X8$+_;yobCCnKVd39@ug_OEUCw$zvny8
zgk}GP4LTrAG-(?=Y0oq1G&hO1{t5_@CP|Z?^%Kr?NQgKJ=~sjdp&jtoh2sR3J{Pp%
ziiOX+Of3XWB_vHj>!*_Erc!UF(&?uE1f+N2OLet2T^jYy!^J$q#dj-!7
z+;yhLVx~@irXgvjo@lnQex@~e7y?Ya!cW5|g!jzNKDy65B#t}G<2s`M$NQt_o13zj
zn+cwqzL}dPnny=jSHWw-26Gu!iE0AmFvRg
z&B8U&;w{}Gmc$~?`~oiTBA&q_f!iYf?II!Z5^@*GuUk$|T#gOrqbdeiWj@in;XVwh
zWsq2AGFbllVwufydBFRR_%}PZ%t61xCxKddY_M|Qa%DqlMX+H-IAn#9cST}gS$c0-
z>~=+De&rSKDq7-`onL(|u`1QD`gU(sUip{e?W&sFs+Q$1^@d*t2EPp5etmqlW@51R
z*=@}VYeF*^qWxcCo~E(d||t?>5YByQ+8_VX%!x|0+VZTj#f%ZnxV8wwv8{+8cJdd3Sn3
zb~X`v-3Or^Wjh2K#98?<8a&b73Dq?PCq@gG#mpZ1D^jJJRS6(|HfcBoE0A
z4=LRbsZ$PV8xQFh4jE8~Ob?HkC68$K@xrdQD;kc9299_aj`&bV{11->B#)mP9t*i2
zi=-TjH6BYW97~~&WgecqlstKDcp~e5BA;@i*m$D6aH5JjQG59No#bx~!{1u&zvcH8
z;#22XP`_D8PK_U)nn<30Has=`i~BQgJhhZOjDs~dD+7bdXZ4n6KMl{E+|Qg-&d@HO
z+rx82$eGu|nGXr--0$&usq%T?_w(S;^U#v>g1z%dl8fla7gov_@!u~JLob5dFZ>@~
zhCjYcmAuS0yv$v=$Sk?c8NAFxT@*jOLi2nHDp%0&SILG~HSSlnDOYuZy1)*21
z3ugptxDnTM{j&jTZli(XA$5ABbOoe^6jZa9C(WcvNIebX;sg
zJTx&mDK#Y>LjNcCnhjv%U>6jXkl>JDm0^{aSK?HcRaC(nYMW~65RLV1t(_e$U8vsX
z_Mx8szTxhHu@U6N;N^AK0_>zqEnsX$PV+84V;Mg(udHbZKG@?ugF>*5%{P?fjlkw;wt9v^{ixJZaETsW@T*`jsu<@1`AbTyG--@Gr2ImrLsDV;
zIrHX^)eS@u$G-*>U!?HXJ3U#O%=nViFek#hvry@`E-_!rbGY7}BiKmb>im0u%FA$}
z-p%pqWPf{bP*TDTbp=8$xL}aWE_&=S;-+{Wv1}}Ql03&<^1i2W{?Mzv4kyu^-b0`p
z^R5Ycm?RRClE%2JCj~}_aMq=;$$~ZjlfrR_2-*B!rYj*LXi+az0tat3Os4n*?3>4s
z#jJ%VpxT7>#BvD0d>|Ai{hg&uOA5Okj~p3m;ET5wXB;E9h8F+)*Al)~b+08_cHpsT
zk}|~O1OCw%O~%0-Lr4rN?!_hy91|%3ljSptuPNX@DFQbnBub4s^OcAWS`Pe-t|cef
z^_ZCzPvxzfXU=Eng6@%31DH=FhMF+$s7f_q@F@f1uO29w48MTOdk+^={pRE@X~WUw
zhIMV_@09ee_3V@nyxuOTXr=yUQ8uJpU{Q%wb2cxZaSII3&39>I%nBXq!ZnF^+}g|8
z?;zN(JDNZ@HBP4m$Po10{!vh1km{fbgF^A38HWw+{NW4s9khPG_j>Px!&V+;gN%F+
zRSo6(%;Dd~zbmr*+|sN*d>@L}Nd8`sF=cu0(^fHte!&@Yh5^xS?w{Rw;j197(pLa%nce~g~DY<0`
ze3g0--b~zS_~`fbWPg!=X`nCc&EzAbUV?-l
zh~L6r&`gl}ieoL4d=hG7xOdm@l_C?)#e1;21D1;(!T4e_$5
zE@_;jFkeLzMhdyE*Gv)Np*ERJEQqf33Kcco-v281s@dJ6QF9ax3(DgDBG;=keG~)l
z%i^`~?lrhP`dd>&uFshIUz!@n@k2rX*3>}#J*r{pbo_5ojf7+9#_X@|2{U{<Jf)uj
zqe++Va-??6R2}8VQqb-AS9JMrBLl{=MBeAAT6}#MFF&3m|2zMEQvUn$fbk-y_XS3~
zU$qeO6D0w^3qR2nXpaU=RH7+BD~oS>Q}UD5J->_JvGj0j9*zKYQQ7zhTt=Tbk&QSL
zFz;78AMVs@wUU#=LU&OG#*Y%UI@GF4!;%U=^VDj0+C49a5*2-UMKC>_P*sr|&29GH
zS!W`1lMjqoTgZ@@MZ%@8xMZ{ATy7J=ZMqlpNlMo(lbH^;XF3s|JaR
ztuqq!*3_yIp>(rc*hct!F)g+9K#4=G!u)=WVBI=hiQ{CT!D)VV{ef$V%cz3k*@R%j
zSrU5o3N*aks&4#4{v9hAp~!`r?$ERiWM_q5sdGC<5m`xq4=TcDe6J>Yv29iS{5Y1X@3Zo8
zh%Tpm?SyTyWyn+LdXT2@Wc%T}-Bn-JrRHWbU->5WDiyBBO
z&MXq$lvTeRi5U>E;i@-NwAUFO`E8Rct*@^pcs80#Z=0`BzolMqIbP^nS@2P5TYKiR
z2EJBN_>*UAHQ~tQ(W#ZMAJ2}lUmUXYxg88m_k9wjnHto1Uhzl#6HJ_*tg@>p@87j>
zrkUBNW8oeX`)r+pm^D=vteJG(vrD|1+p@E-Tk5xT>a9hOpXfib>%QB})xzbeeUq&i
zswYi=de~RuJpKNFmA9zN5{V|fjfVIzKt*hstP$S+bl^vjGi`SEY-RDc{%`8Bb>nOl
zjwRp4tz$~WeldR+?$fulj-R>yC0K%9Y>SVRGsM;<*Byt56neh9S7(FxoT>?+HdZf0
zbL2y7Mk~dCXQ|(8X$;nkl{VNGY1VHWp=yvyKbTcRoO`nlzD;W?vg681b@)-)b6_~z
zxfM9*@U=1`NiVOxC#2%2m_%J-xq1(HWc5}yG$vQ5L-Ffd-9qtKInan*v8Fnthbb(w=kQ0LFxw(3}8a%
z{ZwhGz(Bnl;sAgls_LPp9-Qo+78^h|8$iehu#SK*Q{6q*U0KtC+&k{<{7&qiejXIG0?$Ct
z86G(afs)QVAE@G=uLDhR^#^>wuP?q+nk>|SGxxIQC_AuEsZPD~ms{ABJj}XBmrfGgC~sPxKF`
zUpY?7##E0+@Tr>I$ipJAn}RAd-NwHFsg!OmaZZI{pvSM6z@9$2etwmmOx$Wr3>s;tdEt^FgTeDbarFGCKm7#2o
z!8nKEBvrt4x8uk~(y(LFa=h;xw(XFx^ID$se6-E8X8;)(KnMmF%LX1%1)eyj9LJ}e
z=B50eN;yZQkbDkIiwdNW3(sZuy9;icW@Oe1qlBMbO5+PrKJmWoPC-p&0{B^KmznRjGBv2P-Uno9ie%{q*e++MJ)q7u63M2m
zN&C#1{V5>(i##g(TSB&Zes%<#D|9uTb2{DbGW|a+whx`;ZSgV{wKDwzGX1}12G!(-
zh-3ud=LK`-g?`P5&d-3z=OqL{b2GD&d$Ljzve0vFx=y~nQ+`@Ner7&;y3I*h&4x{9
zm*MAp*C{BKFR1=nP@Z1^uPH!W;^#8|nSidNXdy#$*EJx
zc|yr$P095%y3kPq(7-T6VOY8_9A_AAA`HJ4MmPf_zJig`l-?69CDTQfk~^1DCYDmy
zmeS6Y(qEM_(3CNWmNDy=u{xKrCzf&6mT}LN@m`hj(UkLxmJ8^XKX)z{N-P(tEf9
zm$)jIqN$J(t$3+h@!GjUHnBp!7ES(DC|^~m(p0L6R=(4%)NroUO03kWt<;;TG`OlX
zqNy?#tuoQA`s`d~nppL~gK}3imqI3~4&WN}~L_#eBI)g~QLZs5vq>I*M>eggC
z*W@PF2JQ8g@(QQ0+ZahzHysT}!o@u#x8L_sPdTp;=+GL?^@^x*Bvu(=PZK|~GYGP>d
zuU*5XT?^W-Q`fFH+iq~(ZbaK*EY@M7*YVk0S
z>9jqWVm;YiXLI`Z{O(hOYZY3j2l?`$u>B{bcSHn%W75H6igTlKH)Bdl
zW6E^nZ^Xwnc*eE#$F=Ln&;p)8|G2LBm?6)Ek@$qX(uAqwmef<|^HQnUr-AP*_Bzn4a>_^LZNKXr-M=%mX$60cXETx?a6Q7FEpNevw
zibHd_}YT^|J$WvqMUweLQo6N^`^Fb4dNUDc8B_
zxw+Y!xe2=Y8J_uhrTInic|3jIp!!Mwx%q7+