commit 089afd672817730d348fb52d18f880444d27ae50 Author: iomgaa Date: Wed May 14 00:01:40 2025 +0800 DynamicKV-LLM Pretrain v1.1.0 diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..2e522ae --- /dev/null +++ b/.gitignore @@ -0,0 +1,5 @@ +/model/__pycache__ +/dataset +/out +wandb/ +**/*.log \ No newline at end of file diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..261eeb9 --- /dev/null +++ b/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/README_accelerate.md b/README_accelerate.md new file mode 100644 index 0000000..e8ee065 --- /dev/null +++ b/README_accelerate.md @@ -0,0 +1,126 @@ +# 使用Accelerate+DeepSpeed进行分布式训练 + +本文档介绍如何使用Accelerate和DeepSpeed进行MiniMind模型的分布式训练。 + +## 环境准备 + +首先,确保安装了必要的依赖: + +```bash +pip install accelerate deepspeed +``` + +## 配置文件说明 + +### 1. DeepSpeed配置文件 (ds_config.json) + +DeepSpeed配置文件定义了优化器、学习率调度器和ZeRO优化等参数。主要配置包括: + +- **ZeRO优化**:使用ZeRO-2进行优化,可以减少GPU内存使用 +- **优化器设置**:使用AdamW优化器 +- **混合精度训练**:支持FP16和BF16 +- **梯度累积**:通过"auto"自动设置,与训练脚本参数保持一致 + +### 2. Accelerate配置文件 (accelerate_config.yaml) + +Accelerate配置文件定义了分布式训练的基本设置,包括: + +- **分布式类型**:使用DeepSpeed +- **混合精度**:使用BF16 +- **进程数量**:设置为4(可根据GPU数量调整) +- **DeepSpeed配置**:指向ds_config.json文件 + +## 训练脚本说明 + +新的训练脚本`train_pretrain_accelerate.py`基于原有的`train_pretrain.py`修改而来,主要变化包括: + +1. 使用Accelerator替代了PyTorch原生的分布式功能 +2. 移除了torchrun相关的分布式初始化代码 +3. 使用Accelerator的API进行模型、优化器和数据加载器的准备 +4. 使用Accelerator的API进行反向传播和梯度裁剪 +5. 
处理了位置编码和未使用参数的问题 + +## 启动训练 + +有两种方式启动训练: + +### 方法1:使用预先配置的accelerate配置文件 + +```bash +accelerate launch --config_file accelerate_config.yaml train_pretrain_accelerate.py \ + --epochs 3 \ + --batch_size 24 \ + --learning_rate 2e-4 \ + --dtype bfloat16 \ + --accumulation_steps 32 \ + --grad_clip 1.0 \ + --log_interval 100 \ + --save_interval 10000 \ + --dim 1024 \ + --n_layers 32 \ + --max_seq_len 1024 \ + --use_flash_attn \ + --profile \ + --profile_interval 10 +``` + +### 方法2:使用命令行参数直接配置accelerate + +```bash +CUDA_VISIBLE_DEVICES=0,1,2,3 accelerate launch \ + --multi_gpu \ + --num_processes=4 \ + --mixed_precision=bf16 \ + --main_process_port=29500 \ + --deepspeed_config_file ds_config.json \ + train_pretrain_accelerate.py \ + --epochs 3 \ + --batch_size 24 \ + --learning_rate 2e-4 \ + --dtype bfloat16 \ + --accumulation_steps 32 \ + --grad_clip 1.0 \ + --log_interval 100 \ + --save_interval 10000 \ + --dim 1024 \ + --n_layers 32 \ + --max_seq_len 1024 \ + --use_flash_attn \ + --profile \ + --profile_interval 10 +``` + +也可以直接使用提供的脚本: + +```bash +bash run_accelerate.sh +``` + +## Accelerate与DeepSpeed配置的关系 + +1. **Accelerate**是一个高级API,用于简化分布式训练的设置和启动,它可以与多种分布式训练后端(如DeepSpeed、FSDP等)一起使用。 + +2. **DeepSpeed**是一个优化库,专注于大规模模型训练的内存优化和性能提升,提供了ZeRO优化等功能。 + +3. **配置关系**: + - Accelerate配置文件(YAML)定义了使用哪种分布式后端以及基本的分布式设置 + - DeepSpeed配置文件(JSON)定义了DeepSpeed特有的优化参数 + - Accelerate通过`deepspeed_config_file`参数引用DeepSpeed配置文件 + +## 注意事项 + +1. **位置编码处理**: + - 在模型中,`pos_cis`是一个复数张量,在分布式训练中需要特别处理 + - 在新的训练脚本中,我们使用Accelerator的API来处理这个问题,不再需要`_ddp_params_and_buffers_to_ignore` + +2. **未使用参数处理**: + - 原代码中使用`find_unused_parameters=True`来处理未使用的参数 + - 在新的训练脚本中,我们直接使用Accelerator的API,它会自动处理这个问题 + +3. **混合精度训练**: + - DeepSpeed配置文件中的`fp16`和`bf16`设置为`"auto"` + - 实际使用的精度由Accelerate的`--mixed_precision`参数决定 + +4. 
**梯度累积**: + - DeepSpeed配置文件中的`gradient_accumulation_steps`设置为`"auto"` + - 实际的梯度累积步数由训练脚本的`--accumulation_steps`参数决定 diff --git a/ReadMe.md b/ReadMe.md new file mode 100644 index 0000000..6144272 --- /dev/null +++ b/ReadMe.md @@ -0,0 +1,22 @@ +## 安装环境 +1. 创建conda环境 +```bash +conda create -n accelerate python=3.10 +conda activate accelerate +``` + +2. 根据当前系统的cuda版本安装对应的torch、torchvision和torchaudio + +3. 根据当前环境的torch和torchvision安装accelerate和deepspeed + +4. 安装其他包 +```bash +pip install -r requirements.txt +``` + +## 修改模型 +1. 一般情况只修改 `model`文件夹的文件 + +## 运行 +1. 如果在4090或者4070ti上运行 `bash run_file/DynamicKV-LLM_Mini_Minimind.sh` +2. 如果在4张A800上运行 `bash run_file/DynamicKV-LLM_Small_Minimind.sh` \ No newline at end of file diff --git a/accelerate_config.yaml b/accelerate_config.yaml new file mode 100644 index 0000000..1e841ce --- /dev/null +++ b/accelerate_config.yaml @@ -0,0 +1,17 @@ +compute_environment: LOCAL_MACHINE +deepspeed_config: + deepspeed_config_file: ds_config.json + zero3_init_flag: false +distributed_type: DEEPSPEED +downcast_bf16: 'no' +machine_rank: 0 +main_training_function: main +mixed_precision: bf16 +num_machines: 1 +num_processes: 4 +rdzv_backend: static +same_network: true +tpu_env: [] +tpu_use_cluster: false +tpu_use_sudo: false +use_cpu: false diff --git a/ds_config.json b/ds_config.json new file mode 100644 index 0000000..7175eea --- /dev/null +++ b/ds_config.json @@ -0,0 +1,49 @@ +{ + "train_batch_size": "auto", + "train_micro_batch_size_per_gpu": "auto", + "gradient_accumulation_steps": "auto", + "gradient_clipping": "auto", + "zero_optimization": { + "stage": 2, + "offload_optimizer": { + "device": "cpu", + "pin_memory": true + }, + "allgather_partitions": true, + "allgather_bucket_size": 5e8, + "overlap_comm": true, + "reduce_scatter": true, + "reduce_bucket_size": 5e8, + "contiguous_gradients": true + }, + "fp16": { + "enabled": "auto", + "loss_scale": 0, + "loss_scale_window": 1000, + "initial_scale_power": 16, + "hysteresis": 2, + 
"min_loss_scale": 1 + }, + "bf16": { + "enabled": "auto" + }, + "optimizer": { + "type": "AdamW", + "params": { + "lr": "auto", + "betas": "auto", + "eps": "auto", + "weight_decay": "auto" + } + }, + "scheduler": { + "type": "WarmupLR", + "params": { + "warmup_min_lr": "auto", + "warmup_max_lr": "auto", + "warmup_num_steps": "auto" + } + }, + "steps_per_print": 100, + "wall_clock_breakdown": false +} diff --git a/eval_model.py b/eval_model.py new file mode 100644 index 0000000..a031b52 --- /dev/null +++ b/eval_model.py @@ -0,0 +1,181 @@ +import argparse +import random +import time +import numpy as np +import torch +import warnings +from transformers import AutoTokenizer, AutoModelForCausalLM +from model.model import MiniMindLM +from model.LMConfig import LMConfig +from model.model_lora import * + +warnings.filterwarnings('ignore') + + +def init_model(args): + tokenizer = AutoTokenizer.from_pretrained('./model/minimind_tokenizer') + if args.load == 0: + moe_path = '_moe' if args.use_moe else '' + modes = {0: 'pretrain', 1: 'full_sft', 2: 'rlhf', 3: 'reason', 4: 'grpo'} + ckp = f'./{args.out_dir}/{modes[args.model_mode]}_{args.dim}{moe_path}.pth' + + model = MiniMindLM(LMConfig( + dim=args.dim, + n_layers=args.n_layers, + max_seq_len=args.max_seq_len, + use_moe=args.use_moe + )) + + state_dict = torch.load(ckp, map_location=args.device) + model.load_state_dict({k: v for k, v in state_dict.items() if 'mask' not in k}, strict=True) + + if args.lora_name != 'None': + apply_lora(model) + load_lora(model, f'./{args.out_dir}/lora/{args.lora_name}_{args.dim}.pth') + else: + transformers_model_path = './MiniMind2' + tokenizer = AutoTokenizer.from_pretrained(transformers_model_path) + model = AutoModelForCausalLM.from_pretrained(transformers_model_path, trust_remote_code=True) + print(f'MiniMind模型参数量: {sum(p.numel() for p in model.parameters() if p.requires_grad) / 1e6:.2f}M(illion)') + return model.eval().to(args.device), tokenizer + + +def get_prompt_datas(args): + if 
args.model_mode == 0: + # pretrain模型的接龙能力(无法对话) + prompt_datas = [ + '马克思主义基本原理', + '人类大脑的主要功能', + '万有引力原理是', + '世界上最高的山峰是', + '二氧化碳在空气中', + '地球上最大的动物有', + '杭州市的美食有' + ] + else: + if args.lora_name == 'None': + # 通用对话问题 + prompt_datas = [ + '请介绍一下自己。', + '你更擅长哪一个学科?', + '鲁迅的《狂人日记》是如何批判封建礼教的?', + '我咳嗽已经持续了两周,需要去医院检查吗?', + '详细的介绍光速的物理概念。', + '推荐一些杭州的特色美食吧。', + '请为我讲解“大语言模型”这个概念。', + '如何理解ChatGPT?', + 'Introduce the history of the United States, please.' + ] + else: + # 特定领域问题 + lora_prompt_datas = { + 'lora_identity': [ + "你是ChatGPT吧。", + "你叫什么名字?", + "你和openai是什么关系?" + ], + 'lora_medical': [ + '我最近经常感到头晕,可能是什么原因?', + '我咳嗽已经持续了两周,需要去医院检查吗?', + '服用抗生素时需要注意哪些事项?', + '体检报告中显示胆固醇偏高,我该怎么办?', + '孕妇在饮食上需要注意什么?', + '老年人如何预防骨质疏松?', + '我最近总是感到焦虑,应该怎么缓解?', + '如果有人突然晕倒,应该如何急救?' + ], + } + prompt_datas = lora_prompt_datas[args.lora_name] + + return prompt_datas + + +# 设置可复现的随机种子 +def setup_seed(seed): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + torch.cuda.manual_seed(seed) + torch.cuda.manual_seed_all(seed) + torch.backends.cudnn.deterministic = True + torch.backends.cudnn.benchmark = False + + +def main(): + parser = argparse.ArgumentParser(description="Chat with MiniMind") + parser.add_argument('--lora_name', default='None', type=str) + parser.add_argument('--out_dir', default='out', type=str) + parser.add_argument('--temperature', default=0.85, type=float) + parser.add_argument('--top_p', default=0.85, type=float) + parser.add_argument('--device', default='cuda' if torch.cuda.is_available() else 'cpu', type=str) + # 此处max_seq_len(最大允许输入长度)并不意味模型具有对应的长文本的性能,仅防止QA出现被截断的问题 + # MiniMind2-moe (145M):(dim=640, n_layers=8, use_moe=True) + # MiniMind2-Small (26M):(dim=512, n_layers=8) + # MiniMind2 (104M):(dim=768, n_layers=16) + parser.add_argument('--dim', default=512, type=int) + parser.add_argument('--n_layers', default=8, type=int) + parser.add_argument('--max_seq_len', default=8192, type=int) + parser.add_argument('--use_moe', default=False, type=bool) 
+ # 携带历史对话上下文条数 + # history_cnt需要设为偶数,即【用户问题, 模型回答】为1组;设置为0时,即当前query不携带历史上文 + # 模型未经过外推微调时,在更长的上下文的chat_template时难免出现性能的明显退化,因此需要注意此处设置 + parser.add_argument('--history_cnt', default=0, type=int) + parser.add_argument('--stream', default=True, type=bool) + parser.add_argument('--load', default=0, type=int, help="0: 原生torch权重,1: transformers加载") + parser.add_argument('--model_mode', default=1, type=int, + help="0: 预训练模型,1: SFT-Chat模型,2: RLHF-Chat模型,3: Reason模型,4: RLAIF-Chat模型") + args = parser.parse_args() + + model, tokenizer = init_model(args) + + prompts = get_prompt_datas(args) + test_mode = int(input('[0] 自动测试\n[1] 手动输入\n')) + messages = [] + for idx, prompt in enumerate(prompts if test_mode == 0 else iter(lambda: input('👶: '), '')): + setup_seed(random.randint(0, 2048)) + # setup_seed(2025) # 如需固定每次输出则换成【固定】的随机种子 + if test_mode == 0: print(f'👶: {prompt}') + + messages = messages[-args.history_cnt:] if args.history_cnt else [] + messages.append({"role": "user", "content": prompt}) + + new_prompt = tokenizer.apply_chat_template( + messages, + tokenize=False, + add_generation_prompt=True + )[-args.max_seq_len - 1:] if args.model_mode != 0 else (tokenizer.bos_token + prompt) + + answer = new_prompt + with torch.no_grad(): + x = torch.tensor(tokenizer(new_prompt)['input_ids'], device=args.device).unsqueeze(0) + outputs = model.generate( + x, + eos_token_id=tokenizer.eos_token_id, + max_new_tokens=args.max_seq_len, + temperature=args.temperature, + top_p=args.top_p, + stream=args.stream, + pad_token_id=tokenizer.pad_token_id + ) + + print('🤖️: ', end='') + try: + if not args.stream: + print(tokenizer.decode(outputs.squeeze()[x.shape[1]:].tolist(), skip_special_tokens=True), end='') + else: + history_idx = 0 + for y in outputs: + answer = tokenizer.decode(y[0].tolist(), skip_special_tokens=True) + if (answer and answer[-1] == '�') or not answer: + continue + print(answer[history_idx:], end='', flush=True) + history_idx = len(answer) + except StopIteration: + 
print("No answer") + print('\n') + + messages.append({"role": "assistant", "content": answer}) + + +if __name__ == "__main__": + main() diff --git a/model/LMConfig.py b/model/LMConfig.py new file mode 100644 index 0000000..8ce52fc --- /dev/null +++ b/model/LMConfig.py @@ -0,0 +1,75 @@ +from transformers import PretrainedConfig +from typing import List + + +class LMConfig(PretrainedConfig): + model_type = "minimind" + + def __init__( + self, + dim: int = 512, + n_layers: int = 8, + n_heads: int = 32, + n_kv_heads: int = 8, + vocab_size: int = 6400, + hidden_dim: int = None, + multiple_of: int = 64, + norm_eps: float = 1e-5, + max_seq_len: int = 8192, + rope_theta: int = 1e6, + dropout: float = 0.0, + flash_attn: bool = True, + #################################################### + # DB related configurations + #################################################### + disable_db: bool = False, # 特殊模式:禁用数据库功能 + #################################################### + # Here are the specific configurations of MOE + # When use_moe is false, the following is invalid + #################################################### + use_moe: bool = False, + #################################################### + num_experts_per_tok: int = 2, + n_routed_experts: int = 4, + n_shared_experts: bool = True, + scoring_func: str = 'softmax', + aux_loss_alpha: float = 0.1, + seq_aux: bool = True, + norm_topk_prob: bool = True, + #################################################### + knowlwdge_num: int = 64*64, + knowlwdge_length: int = 8, + **kwargs, + ): + self.dim = dim + self.n_layers = n_layers + self.n_heads = n_heads + self.n_kv_heads = n_kv_heads + self.vocab_size = vocab_size + self.hidden_dim = hidden_dim + self.multiple_of = multiple_of + self.norm_eps = norm_eps + self.max_seq_len = max_seq_len + self.rope_theta = rope_theta + self.dropout = dropout + self.flash_attn = flash_attn + #################################################### + # DB related configurations + 
#################################################### + self.disable_db = disable_db # 设置是否禁用数据库 + #################################################### + # Here are the specific configurations of MOE + # When use_moe is false, the following is invalid + #################################################### + self.use_moe = use_moe + self.num_experts_per_tok = num_experts_per_tok # 每个token选择的专家数量 + self.n_routed_experts = n_routed_experts # 总的专家数量 + self.n_shared_experts = n_shared_experts # 共享专家 + self.scoring_func = scoring_func # 评分函数,默认为'softmax' + self.aux_loss_alpha = aux_loss_alpha # 辅助损失的alpha参数 + self.seq_aux = seq_aux # 是否在序列级别上计算辅助损失 + self.norm_topk_prob = norm_topk_prob # 是否标准化top-k概率 + #################################################### + self.knowlwdge_num = knowlwdge_num + self.knowlwdge_length = knowlwdge_length + super().__init__(**kwargs) diff --git a/model/dataset.py b/model/dataset.py new file mode 100644 index 0000000..14acc6c --- /dev/null +++ b/model/dataset.py @@ -0,0 +1,245 @@ +import json +import random +import re + +import pandas as pd +import numpy as np +from torch.utils.data import Dataset, DataLoader +import torch +from sklearn.model_selection import train_test_split +import os +import ast + +os.environ["TOKENIZERS_PARALLELISM"] = "true" + + +class PretrainDataset(Dataset): + def __init__(self, data_path, tokenizer, max_length=512): + super().__init__() + self.tokenizer = tokenizer + self.max_length = max_length + self.samples = self.load_data(data_path) + + def load_data(self, path): + samples = [] + with open(path, 'r', encoding='utf-8') as f: + for line_num, line in enumerate(f, 1): + data = json.loads(line.strip()) + samples.append(data) + return samples + + def __len__(self): + return len(self.samples) + + def __getitem__(self, index): + sample = self.samples[index] + + # 构建输入文本 + text = f"{self.tokenizer.bos_token}{str(sample['text'])}{self.tokenizer.eos_token}" + encoding = self.tokenizer( + text, + max_length=self.max_length, + 
padding='max_length', + truncation=True, + return_tensors='pt' + ) + input_ids = encoding.input_ids.squeeze() + loss_mask = (input_ids != self.tokenizer.pad_token_id) + + X = torch.tensor(input_ids[:-1], dtype=torch.long) + Y = torch.tensor(input_ids[1:], dtype=torch.long) + loss_mask = torch.tensor(loss_mask[1:], dtype=torch.long) + return X, Y, loss_mask + + +class SFTDataset(Dataset): + def __init__(self, jsonl_path, tokenizer, max_length=1024): + super().__init__() + self.tokenizer = tokenizer + self.max_length = max_length + self.samples = self.load_data(jsonl_path) + self.bos_id = tokenizer('assistant', add_special_tokens=False).input_ids + self.eos_id = tokenizer('', add_special_tokens=False).input_ids + + def __len__(self): + return len(self.samples) + + def load_data(self, path): + samples = [] + with open(path, 'r', encoding='utf-8') as f: + for line_num, line in enumerate(f, 1): + data = json.loads(line.strip()) + samples.append(data) + return samples + + def _create_chat_prompt(self, conversations): + """构建符合ChatML格式的对话""" + messages = [] + for i, turn in enumerate(conversations): + role = 'user' if i % 2 == 0 else 'assistant' + messages.append({"role": role, "content": turn['content']}) + return self.tokenizer.apply_chat_template( + messages, + tokenize=False, + add_generation_prompt=False + ) + + def _generate_loss_mask(self, input_ids): + loss_mask = [0] * len(input_ids) + i = 0 + while i < len(input_ids): + if input_ids[i:i + len(self.bos_id)] == self.bos_id: + start = i + len(self.bos_id) + end = start + while end < len(input_ids): + if input_ids[end:end + len(self.eos_id)] == self.eos_id: + break + end += 1 + for j in range(start + 1, min(end + len(self.eos_id) + 1, self.max_length)): + loss_mask[j] = 1 + i = end + len(self.eos_id) if end < len(input_ids) else len(input_ids) + else: + i += 1 + return loss_mask + + def __getitem__(self, index): + sample = self.samples[index] + # 构建对话提示 + prompt = self._create_chat_prompt(sample['conversations']) + 
input_ids = self.tokenizer(prompt).input_ids[:self.max_length] + input_ids += [self.tokenizer.pad_token_id] * (self.max_length - len(input_ids)) + + # 生成动态损失掩码 + loss_mask = self._generate_loss_mask(input_ids) + + # 构建训练数据 + X = torch.tensor(input_ids[:-1], dtype=torch.long) + Y = torch.tensor(input_ids[1:], dtype=torch.long) + loss_mask = torch.tensor(loss_mask[1:], dtype=torch.long) # 对齐预测位置 + + return X, Y, loss_mask + + +class DPODataset(Dataset): + def __init__(self, file_path, tokenizer, max_length=4096): + super().__init__() + self.tokenizer = tokenizer + self.max_length = max_length + self.padding = tokenizer.pad_token_id if tokenizer.pad_token_id is not None else 0 + self.bos_id = tokenizer('assistant', add_special_tokens=False).input_ids + self.eos_id = tokenizer('', add_special_tokens=False).input_ids + with open(file_path, 'r', encoding='utf-8') as f: + self.data = [] + for line in f: + line = line.strip() + obj = json.loads(line) + self.data.append(obj) + + def __len__(self): + return len(self.data) + + def __getitem__(self, index): + item = self.data[index] + chosen = item['chosen'] # 是一个 list,里面包含若干 {role, content} + rejected = item['rejected'] # 同上 + chosen_prompt = self.tokenizer.apply_chat_template( + chosen, tokenize=False, add_generation_prompt=False + ) + + rejected_prompt = self.tokenizer.apply_chat_template( + rejected, tokenize=False, add_generation_prompt=False + ) + chosen_encoding = self.tokenizer( + chosen_prompt, truncation=True, max_length=self.max_length, padding='max_length' + ) + rejected_encoding = self.tokenizer( + rejected_prompt, truncation=True, max_length=self.max_length, padding='max_length' + ) + + chosen_input_ids = chosen_encoding['input_ids'] + chosen_loss_mask = self._generate_loss_mask(chosen_input_ids) + + rejected_input_ids = rejected_encoding['input_ids'] + rejected_loss_mask = self._generate_loss_mask(rejected_input_ids) + x_chosen = torch.tensor(chosen_input_ids[:-1], dtype=torch.long) + y_chosen = 
torch.tensor(chosen_input_ids[1:], dtype=torch.long) + mask_chosen = torch.tensor(chosen_loss_mask[1:], dtype=torch.long) + x_rejected = torch.tensor(rejected_input_ids[:-1], dtype=torch.long) + y_rejected = torch.tensor(rejected_input_ids[1:], dtype=torch.long) + mask_rejected = torch.tensor(rejected_loss_mask[1:], dtype=torch.long) + + return { + 'x_chosen': x_chosen, + 'y_chosen': y_chosen, + 'mask_chosen': mask_chosen, + 'x_rejected': x_rejected, + 'y_rejected': y_rejected, + 'mask_rejected': mask_rejected + } + + def _generate_loss_mask(self, input_ids): + loss_mask = [0] * len(input_ids) + i = 0 + while i < len(input_ids): + if input_ids[i:i + len(self.bos_id)] == self.bos_id: + start = i + len(self.bos_id) + end = start + while end < len(input_ids): + if input_ids[end:end + len(self.eos_id)] == self.eos_id: + break + end += 1 + for j in range(start + 1, min(end + len(self.eos_id) + 1, self.max_length)): + loss_mask[j] = 1 + i = end + len(self.eos_id) if end < len(input_ids) else len(input_ids) + else: + i += 1 + return loss_mask + + +class RLAIFDataset(Dataset): + def __init__(self, jsonl_path, tokenizer, max_length=1024): + super().__init__() + self.tokenizer = tokenizer + self.max_length = max_length + self.samples = self.load_data(jsonl_path) + self.bos_id = tokenizer('assistant', add_special_tokens=False).input_ids + self.eos_id = tokenizer('', add_special_tokens=False).input_ids + + def __len__(self): + return len(self.samples) + + def load_data(self, path): + samples = [] + with open(path, 'r', encoding='utf-8') as f: + for line_num, line in enumerate(f, 1): + data = json.loads(line.strip()) + samples.append(data) + return samples + + def _create_chat_prompt(self, conversations): + """构建符合ChatML格式的对话""" + messages = [] + answer = '' + for i, turn in enumerate(conversations): + role = 'user' if i % 2 == 0 else 'assistant' + messages.append({"role": role, "content": turn['content']}) + answer = turn['content'] + return self.tokenizer.apply_chat_template( 
+ messages[:-1], + tokenize=False, + add_generation_prompt=True + ), answer + + def __getitem__(self, index): + sample = self.samples[index] + # 构建对话提示 + prompt, answer = self._create_chat_prompt(sample['conversations']) + + return { + 'prompt': prompt, + 'answer': answer + } + + +if __name__ == "__main__": + pass diff --git a/model/minimind_tokenizer/merges.txt b/model/minimind_tokenizer/merges.txt new file mode 100644 index 0000000..767c649 --- /dev/null +++ b/model/minimind_tokenizer/merges.txt @@ -0,0 +1,6142 @@ +#version: 0.2 +Ġ t +Ġ a +i n +h e +r e +ï ¼ +ä ¸ +o n +a t +ç ļ +çļ Ħ +ï¼ Į +Ġ s +Ġ c +n d +ã Ģ +e r +Ġt he +e s +e n +o r +a n +Ġa nd +in g +Ġ p +i t +a l +ãĢ Ĥ +Ġ o +Ġ w +ä » +Ġt o +i s +o u +Ġ m +ä º +Ġ in +Ġ f +Ġ b +e d +i on +å ı +i c +Ġ d +Ġo f +l e +a r +r o +Ġ Ġ +å ħ +en t +æ ľ +Ġ e +å Ĵ +è ¿ +ä ½ +åĴ Į +æ Ī +å ® +å Ī +v e +u s +Ġ re +Ġ h +Ġt h +a s +c t +ç Ķ +o m +å ľ +å ¤ +æ ĺ +å Ĭ +å IJ +ä¸ Ģ +i m +è ¯ +æ ĸ +at ion +l o +ç » +Ġb e +ãĢ ģ +i d +Ġc an +i l +æĺ ¯ +ä ¹ +è ® +Ġ A +Ġth at +Ġ T +ä» ¥ +c h +Ġ y +c e +ï¼ ļ +o t +er s +Ġ n +é Ģ +r a +å ° +Ġ g +Ġy ou +å Ń +Ġp ro +e t +å º +åľ ¨ +l y +Ġ is +ä¸ ª +Ġ l +u r +Ġf or +åı ¯ +é ĩ +s t +çļĦ æ +u t +Ġ he +i f +ĥ ½ +ä ¼ +Ġ I +è ¡ +i r +it h +å ¹ +Ġa re +i g +Ġs t +e l +o l +å ¸ +u l +æ Ŀ +æĪ ij +Ġ on +è ¦ +æľ ī +æ Ĺ +å ¯ +è § +è¦ ģ +Ġ us +a y +æ ķ +ç ī +o w +m ent +çĶ ¨ +es s +ä¸ Ń +ä» ¬ +äº º +å ĩ +Ġe x +ĠĠ ĠĠ +å Ľ +å Į +å ¼ +Ġc on +s e +è ĥ½ +ç İ +Ġa n +Ġw ith +ä¸ º +at e +i v +a m +Ġa s +u re +è¿ Ļ +å Ĩ +ç Ń +Ġ or +å · +Ġa l +i es +ç § +Ġ im +æ Ģ +v er +a b +äº Ĩ +Ġs u +Ġd e +g e +t h +åı¯ 以 +è Ģ +ä¸ į +å ¾ +ĠA I +Ġ en +é Ĺ +æ ī +a k +i ve +Ġm o +å ¥ +é Ŀ +ç Ľ +it y +ä ¿ +u n +è ´ +å į +Ġ it +Ġim p +e ct +æ ł +å ½ +è ĩ +é ¢ +å ĵ +æ ³ +or t +a d +æ ŀ +e m +Ġc om +å ¦ +he r +e re +Ġ S +i al +Ġ C +ĠT he +ç IJ +çĶ Ł +æ Ħ +p p +æ Ń +æĸ ¹ +q u +Ġw h +å¦ Ĥ +é ľ +an t +Ġ le +Ġ v +æ ĭ +æ Ĭ +us t +æĹ ¶ +çŃ ī +å ij +å¯ ¹ +t er +l d +è¡ Į +Ġc h +u d +éľ Ģ +æ ° +æĪ IJ +Ġ | +a c +a in +i z +æ ı +ion s +Ġh a 
+æ Ľ +- - +æĿ ¥ +om e +å ¿ +' s +Ġn e +es t +ä ¾ +u m +åĪ ° +åľ ° +is t +â Ģ +çī © +ä¸Ģ 个 +l p +æ İ +èĩ ª +Ġhe lp +Ġthe ir +æ Ķ +ä½ ľ +ä¼ ļ +æ Į +æĪij 们 +n t +äº İ +åĪ Ĩ +re s +p e +åĩ º +id e +æ ĥ +Ġ H +è ¾ +Ġ M +f f +æ ¯ +o d +ic al +Ġw or +ä¸ Ĭ +a re +æĽ ´ +Ġyou r +ä¸ ĭ +è µ +ation s +æķ ° +Ġt e +å İ +çIJ Ĩ +ĠT h +è¿ ĩ +å¹ ¶ +d u +éĿ ¢ +Ġa d +il l +æ µ +å¥ ½ +o c +a ct +éľĢ è¦ģ +ä» ĸ +å ± +Ġ r +Ġmo re +åŃ ¦ +ç ® +ig h +äº Ľ +Ġ B +åĬ ¨ +åĵ ģ +è ī +p le +Ġin c +åIJ Į +Ġex p +ou ld +ä½ ł +æ į +æı IJ +å¤ § +çİ ° +p t +Ġ P +al l +åĬ ł +ç§ į +Ġs e +åĬ Ľ +ou t +Ġha ve +ç º +ä½ ĵ +Ġpro v +åĮ ĸ +å¤ ļ +å® ļ +Ġus ed +éĢ ļ +c c +è¿ Ľ +æ ´ +Ġs h +Ġa b +o s +Ġre s +ĠTh is +ç ¨ +æĢ § +a ge +r i +æ ¸ +ab le +åŃ IJ +Ġb y +åı ij +éĩ ı +åº Ķ +Ġ lo +ä½ ¿ +åħ ¶ +é « +é Ļ +é« ĺ +åº ¦ +è§ £ +é £ +å° Ĩ +æ³ ķ +a nd +ä¿ Ŀ +an s +f or +ro m +re at +Ġp l +çļĦ ç +å¸ ¸ +è ½ +Ġw e +è¡ ¨ +ak e +æĪ ĸ +é¢ ĺ +å Ł +Ġm e +æĸ ĩ +t her +k e +å® ¶ +åIJ Ī +æľ Ģ +in e +Ġs ome +ç ± +éĩ į +æŀ ľ +Ġ W +Ġ E +é ĺ +ou r +r ou +ç Ĥ +æ ± +åħ ³ +Ġin t +an ce +ä¹ Ł +é ģ +ĠĠ Ġ +å® ĥ +a g +æ ¬ +0 0 +è ° +ul t +y st +éĹ ´ +ç ³ +Ġt r +p l +ar t +æĦ Ł +æ Ĥ +at a +Ġ F +for m +è® ¡ +Ġf rom +Ġ D +éĹ ® +igh t +c es +æį ® +lo p +ä¹ ĭ +Ġf e +å ģ +ve lop +Ġ 1 +åĽ ł +k s +æ ² +Ġ u +å° ı +yst em +Ġd is +Ġ R +g y +å· ¥ +ç¨ ĭ +å ¢ +en ce +è Ĥ +ç ¡ +Ġt ra +å » +åħ ¥ +ig n +al th +Ġsu ch +a ch +æ Ļ +ar n +Ġd ata +è ¶ +å® ŀ +s o +Ġde velop +ç ¤ +Ġa cc +as t +èĢ Į +Ġ " +Ġo ther +å» º +Ġe ff +ç « +Ġm an +åħ ¬ +å Ģ +ç Ħ +m s +å¼ ı +èī ² +å¾ Ĺ +if ic +Ġ j +Ġ ro +Ġh as +ch n +o lo +åĪ ¶ +è Ĭ +使 ç͍ +ou s +u al +Ġa t +Ġe m +el l +Ġs ystem +Ġhe alth +it ies +Ġex am +i b +é Ķ +Ġab out +äº § +åIJ İ +æĦ ı +ç± » +Ġp re +æĤ ¨ +Ġal so +ent s +Ġin d +in d +éĢ Ĥ +Ġte chn +res s +æĥ ħ +éĹ® é¢ĺ +Ġus e +ï¼ Ł +Ġinc l +Ġs pe +ic h +p s +æľ º +Ġthe y +i e +Ġh ow +Ġwor k +ä¸ ļ +ç ´ +Ġimp ro +Ġle arn +æĸ ° +çĤ ¹ +Ġcon t +ar d +çĦ ¶ +æľ ¬ +ç³ » +ç¡ ® +è® ¾ +åħ · +éĢ ī +èĢ ħ +é ħ +g h +_ _ +Ġn ot +ç ľ +çĽ ¸ +Ġprov ide +å ī +ion al +Ġen s +ä¸ İ +è´ ¨ +ent ial 
+ç» ı +å¿ ĥ +an g +æŃ ¤ +e nd +Ġp o +è¿Ľ è¡Į +ic e +Ġ - +Ġw ay +å· ± +Ġ 2 +im e +ç ½ +èĩª å·± +Ġ un +b ot +Ġincl ud +at ed +æ° ´ +é ķ +æĮ ģ +ä» £ +é ¡ +æī Ģ +ç Ŀ +pp ort +o od +i ke +r u +Ġcom m +Ġ L +ä¿ ¡ +Ġ G +ç Ł +çĶ µ +Ġw as +lo w +er v +åĮ ħ +ĠĠĠĠ ĠĠĠĠ +Ġw he +d it +Ġwh ich +Ġcom p +é ª +o re +ç ¾ +Ġ = +çī ¹ +if f +er t +æ ģ +r it +Ġre c +åĨ ħ +æĺ İ +or s +Ġp at +-- -- +æ Ł +Ġa pp +n s +åĬ ¡ +al y +a ce +æ´ » +ä¾ Ľ +a v +ä¸ » +Ġp ers +ç ĥ +è¯ ¥ +Ġm y +ç © +er i +è® © +æĬ Ģ +éķ ¿ +ac k +Ġ N +Ġd iff +Ġth is +å Ŀ +Ġens ure +å½ ĵ +Ġo ut +Ġc l +Ġ k +é ¦ +ou nt +çİ ¯ +åĬ © +Ġtechn olo +Ġthe se +f ul +é ļ +æ · +ä¸Ģ äºĽ +Ġs oc +å¼ Ģ +å¤ © +Ġe v +Ġre du +Ġthe m +Ġ ( +é ĥ½ +æĪ · +è · +åľ º +æ° Ķ +Ġ Y +è¯ Ń +éĢļ è¿ĩ +å± ķ +Ġc o +å½ ± +ç ¬ +Ġan aly +æ¯ Ķ +åħ ¨ +Ġimpro ve +ç» ĵ +å¹ ´ +ç ķ +çĿ Ģ +Ġh um +Ġ qu +ç® Ĺ +Ġ O +é£ Ł +il ity +Ġsystem s +åı ĺ +a il +ç ¼ +ç ł +è¿Ļ 个 +æıIJ ä¾Ľ +as e +å ŀ +ment s +Ġp ot +Ġan y +ä½ Ĩ +Ġcon s +ĠI t +æł ¼ +Ġa r +æľ ¯ +éĿ ŀ +Ġd o +Ġm ay +æĭ © +u e +éĢī æĭ© +r y +é ĥ +Ġl ike +on g +è ģ +` ` +i le +æ± Ĥ +Ġne w +i ent +Ġimp act +è¿ ĺ +æ³ ¨ +ä¹ Ī +çĽ ® +âĢ ľ +âĢ Ŀ +e f +ä¾ ĭ +Ġpot ential +o k +åı¯ èĥ½ +Ġtr ans +Ġa ct +ï¼ ī +Ġspe c +æ ¶ +Ġw ill +äº ¤ +iz e +ç¾ İ +å¸ Ĥ +Ġst ud +p on +è º +ä¸į åIJĮ +on e +å¾ Ī +åı Ĭ +å¦Ĥ æŀľ +çIJ ĥ +an ge +Ġne ed +å¤ ĸ +et y +ak ing +è¯ · +at er +Ġpers on +id ent +Ġs o +Ġm ake +å¹ ³ +å¤ Ł +èº « +ï¼ Ī +Ġin form +æ ¡ +äº ĭ +åı Ĺ +as ed +il d +Ġof f +Ġthe re +c is +è ¢ +éĥ ¨ +æ¯ ı +ra ct +as s +Ġlearn ing +å ĸ +å½ ¢ +i re +ä» İ +bot s +è Ļ +å¸ ® +Ġd es +ĠI n +c ess +Ġp e +if y +Ġwh o +ä¹ ł +æľ Ł +Ġexp eri +é Ĥ +Ġs c +e p +ä½ ķ +Ġt ime +éĿŀ 常 +æĭ ¬ +å ķ +以 ä¸ĭ +éģ ĵ +Ġcomm un +Ġc ould +a p +è IJ +è° ĥ +l ic +du ct +Ġit s +c y +è¯ ´ +Ġm ed +Ġc ol +ul ar +éĩį è¦ģ +Ġs p +åĪ © +èµ · +Ġprov id +ic es +å Ļ +æĸ Ļ +Ġimp ort +ur al +åŃ Ĺ +Ġu nd +in t +Ġo ver +åı ¸ +æł ¹ +é ¥ +pl es +ä»ĸ 们 +g ra +ur ing +n ow +åį ķ +è¿Ļ äºĽ +åī į +å® ī +Ġp r +åĮħ æĭ¬ +ç» Ļ +T he +ä½ į +å § +ç´ ł +åij ĺ +Ġ ident +åŀ ĭ +Ġad d +å¼ º +æĺ¯ ä¸Ģ +i p 
+g or +Ġsu pport +n e +Ġdiff ere +åħ ĥ +Ġas s +åĨ ³ +é Ľ +åIJ į +Ġg o +Ġtechnolo gy +æĢ » +è® ® +Ġin ter +Ġin v +Ġo ur +æķ Ī +ust om +Ġre l +if e +åĻ ¨ +ing s +ä» · +Ġp art +è¢ « +æī ĭ +ar y +Ġres pon +Ċ ĠĠĠ +好 çļĦ +at ive +帮 åĬ© +ç» Ł +æĶ ¾ +ĠH ere +ç ģ +Ġb ut +æģ ¯ +æŃ £ +ar k +åħ¬ åı¸ +or y +å¢ ĥ +le ct +é Ł +æĥ ³ +é£ İ +at ing +Ġa m +it s +æ » +gor ith +åĵ į +ure s +Ġeff ect +Ġsh ould +Ġp er +è ± +ç ² +ic t +Ġal gorith +u c +rou gh +ä» » +ä» ¶ +Ġbe t +i a +Ġanaly z +æł¹ æį® +iz ed +æµ ģ +è§ Ĥ +è £ +æł ĩ +ir on +Ġc ustom +Ġre g +Ġperson al +èĥ½ å¤Ł +ic s +iv id +ç Ī +èµ Ħ +æŃ ¥ +å® ¹ +åĪ Ľ +è Ī +ä¹ IJ +å¯ ¼ +g an +èĬ Ĥ +Ġal l +en s +am e +n ess +Ġu p +Ġ U +èĢ ĥ +el f +åĢ ¼ +å° ij +æľ į +ar i +th ical +v iron +è ĥ +or d +Ġs ign +éĩ Į +ou nd +o ple +åŁ º +Ġinform ation +Ġident ify +åĽ ŀ +Ġc re +éŁ ³ +ib le +u b +è¿ IJ +Ġle ad +æ¸ ¸ +æ¬ ¡ +åĨ Ļ +éĤ £ +g et +è į +Ġexam ple +ä¼ ĺ +å½± åĵį +is h +x t +æ º +éª Į +o b +å® ¢ +å¤ ĩ +åģ ¥ +è½ ¦ +ç¤ ¾ +ivid ual +ere d +l es +Ġen viron +Ġpe ople +æĺ Ł +ç ĸ +ç ĭ +Ġd et +æĹ ł +Ġ if +o se +it e +å¢ ŀ +é Ĵ +åIJĮ æĹ¶ +è¿ ° +æĸ¹ å¼ı +åĽ ½ +é » +å¤ Ħ +Ġexam ples +æ ® +Ġint o +æĮ ĩ +Ġhum an +åIJ ij +ç¤ º +æķ° æį® +Ġ 3 +Ġ J +è ı +çݯ å¢ĥ +al s +ers t +Ġe thical +ç» Ħ +ä¼ ł +Ġdiffere nt +Ġk now +åº ı +Ġind ividual +æıIJ é«ĺ +rou nd +å° ± +åı ĸ +åŃ ĺ +ä¸ ¤ +çŁ ¥ +our ces +c k +å £ +in es +è¾ ¾ +Ġman y +æķ ´ +æł · +dit ional +om m +çĶ ± +éĢ ł +å®ĥ 们 +u es +Ġm ent +Ġimport ant +Ġo pt +Ġlo c +p h +Ġpro cess +Ġalgorith ms +设 计 +Ġsoc ial +ver y +åĪ Ļ +ä¾ĭ å¦Ĥ +è® ¤ +Ġa ut +Ġs erv +g g +产 åĵģ +è§ Ħ +çľ ĭ +ve l +æĸ¹ æ³ķ +Ġb en +åĽł æŃ¤ +c are +p er +åĬ Ł +建 è®® +Ġp os +æ ¤ +w e +åĮ º +i qu +Ġre al +æĹ ¥ +Ġredu ce +a f +ang u +Ġs k +Ġ ed +erst and +åĨ µ +m ot +åħ Ī +ç ¥ +åºĶ 该 +Ġth rough +Ġcon c +åıij å±ķ +è¯ ķ +æ¡ Ī +Ġenviron ment +åı £ +Ġad v +åĪ « +Ġben ef +æ¸ ħ +åij ³ +åħ ī +Ġdevelop ment +en g +å¦Ĥ ä½ķ +ç® ¡ +iv ers +åIJ Ħ +Ġr is +ro w +er gy +计 ç®Ĺ +ä¿¡ æģ¯ +Ġpro duct +è¾ ĥ +è® º +èĩªå·± çļĦ +æĬ ¤ +åı į +åħ¶ ä»ĸ +åĪ Ĺ +ç» Ĩ +ç© º +Ġg reat 
+e ar +æº IJ +j ect +çĶŁ æ´» +ä¸Ń çļĦ +Ġund erstand +è ĭ +h at +Ġpro gra +ç Ĭ +éĩ ij +Ġinclud ing +Ġacc ess +ĠĠĠĠ ĠĠĠ +è¯ Ĩ +ç ¦ +o g +è£ ħ +Ġar t +Ġw rit +Ġinc re +Ġp h +æĸ¹ éĿ¢ +Ġp ract +Ġus ing +é¡ ¹ +æİ ¥ +Ġway s +Ġl angu +æĶ ¯ +Ġch all +åİ » +__ __ +im ate +æĸ Ń +è ¨ +Ġw ell +l l +Ġp ol +æĢ ģ +Ġ ra +C an +åİ Ł +b er +è¨ Ģ +ç« ĭ +Ġg en +éħ į +æ· ± +t e +ä¸ ī +ç§ ij +ĠF or +çº ¿ +ç ħ +æ ¼ +åķ Ĩ +æĿ IJ +Ġsign ific +Ġg u +Ġde cis +Ġtra in +Ġa g +Ġc reat +å® Į +æĹ¶ éĹ´ +Ġon e +è Ħ +Ġn at +åѦ ä¹ł +çļĦæ ķ +c ed +Ġwhe n +Ġb i +è İ +æĽ´ åĬł +iv es +p ort +å·¥ ä½ľ +v ing +Ġbe en +æĻ º +Ġl ife +å¼ ķ +ar m +çİ ĩ +ç͍ æĪ· +ä¹ ī +ä» ½ +è¯ Ŀ +in ess +c om +åº · +åĩ ı +ä» Ģ +è¾ ĵ +Ġv ari +c on +Ġmo d +ä»Ģ ä¹Ī +Ġen ergy +æĬĢ æľ¯ +ert ain +m m +ver all +åĪ Ĵ +Ġro bots +Ġor gan +æİ ¨ +ant s +åĩ Ĩ +d s +æŀ ģ +ç Ļ +Ġre qu +Ġ ess +ç® Ģ +ust ain +æ ¨ +Ġst r +c ing +ab ility +re e +Ġed uc +åİ Ĩ +Ġcre ate +åģ¥ åº· +Ġdes ign +i ps +åģ ļ +èĬ ± +in k +èı ľ +æī ¾ +æ® µ +æµ ĭ +Ġ V +ĠB y +å Ķ +é¦ ĸ +è¯ į +Ġwhe re +Ġdis c +äºĨ è§£ +r ic +ä¸ Ķ +è¶ ³ +æĺ¯ ä¸Ģ个 +ar ch +ç§ ¯ +å¸ ¦ +Ġwh ile +Ġsignific ant +çł ģ +æĪ ¿ +Ġbe ing +Ġlangu age +it ive +2 0 +Ġanalyz e +æĻ ¯ +è Į +ri b +æ¨ ¡ +ĠS t +è´ ¹ +' t +Ġhealth care +Ġexperi ence +Ġ 5 +个 人 +ay s +è± ¡ +p lo +Ġw ould +èĻ ij +æĶ ¶ +é¢ Ħ +é¢ Ĩ +ä¿Ŀ æĮģ +en ces +åı ª +èĩ ´ +æĪ ı +Ġment al +Ġfe w +at es +è¿ĩ ç¨ĭ +å®ī åħ¨ +Ġs ustain +Ġw ere +å¤ ª +ç Į +Ġspec ific +Ġwor ld +çŃ Ķ +`` ` +Ġt ake +åħ » +éĢ Ł +e ver +S S +éĶ Ģ +Ġb o +he s +Ġm us +æľį åĬ¡ +è§ Ĵ +t en +æŀ IJ +p ow +d ict +v ent +1 0 +çļĦæ Ĺ +ĸ çķ +Ġpro t +ç½ ® +Ġh igh +Ġb us +Ġind ust +åIJ ¦ +c ial +人 们 +ĠA s +åij Ĭ +ad e +æĶ ¹ +ç Ĺ +Ġh ad +Ġhe r +Ġj ust +ï¼ Ľ +è´ Ń +ç¬ ¬ +é ĵ +Ġw ater +Ġf ood +éĺ Ł +a us +Ġchall eng +åħ į +æĸĩ åĮĸ +Ġmo st +é ¸ +ç½ ij +çĽ ´ +Ġs m +Ġact iv +plo y +O verall +å¿ « +ru ct +Ġindividual s +å§ ĭ +g ies +æŁ ¥ +çĪ ± +i ety +I n +åĪĨ æŀIJ +è§ Ĩ +æ¸ © +ç» ´ +ol ut +åŁ Ł +omm end +Ġcom ple +æķ Ļ +Ġb u +Ġeduc ation +at her +Ġ 4 +t ing +Ġf ind +æ² ¡ +Ġh is +ä¹ĭ éĹ´ +Ġeffect 
ive +Ġat t +Ġre se +èĥ½ åĬĽ +åŁ İ +Ġal low +Ġa v +Ġpro mot +æĻº èĥ½ +æ» ¡ +åħ ± +ie w +c ome +ç³» 绣 +Ġrespon s +äº Ĵ +Ġc ult +pow ered +Ġrec ommend +èIJ ¥ +O SS +Ġch ange +è¯ ģ +v ed +æİ Ĵ +è§£ åĨ³ +ic i +ĠH ow +Ġfe el +æľ Ī +Ġwh at +以 åıĬ +Ġse e +åŃ © +b s +Ġs ur +æ £ +al ity +Ġv is +ç¡® ä¿Ŀ +p ect +å®ŀ çݰ +Ġc are +å¹ ¿ +ill s +åº Ń +as es +å¤ į +åºĶ ç͍ +çļĦæ ĥ +ard s +Ġadd ress +Ġcomp an +Ġinv ol +Ġcustom er +åĽł 为 +Ġstud ents +Ġin s +注 æĦı +æŀ Ħ +æ¬ ¢ +æµ · +åı Ĥ +èĩª çĦ¶ +é © +ĠThe se +w n +æĺ ĵ +çĬ ¶ +re n +Ġt reat +Ġbenef its +Ċ ĠĠĠĠĠĠĠ +对 äºİ +æĢ Ŀ +id er +ĠY es +Ġ K +åĸ ľ +Ġ ke +Ġen g +Ġpo p +o st +p are +Ġm on +æ¬ ¾ +ĠM OSS +Ġem ot +Ġa c +ç¼ ĸ +f ore +åı ¥ +Ġv al +il y +Ġis s +èĤ ī +èĩ ³ +游 æĪı +we en +Ġinclud e +Ġprot ect +åħ³ ç³» +éĻ © +Ġse ver +Ġth an +éľĢ æ±Ĥ +ç» ĥ +ĠThe y +is s +y s +Ġj ob +éĺ ³ +æ IJ +Ġbet ween +Ġm ach +---- ---- +èĢĥ èĻij +è´¨ éĩı +Ġbus iness +w or +ic k +e g +åħ ħ +ç ¯ +æĿ ¡ +n er +a pt +Ġapp ro +Ġpl ay +没 æľī +¤ IJ +æľ ª +æĪ ĺ +å®¶ åºŃ +ãĢ ĭ +en cy +ĠC h +ãĢ Ĭ +Ġprovid ing +Ġres ources +âĢ Ļ +Ġass ist +Ġnat ural +è¯ Ħ +ä¾ ¿ +Ġs af +åħ· æľī +è° ¢ +çĥ Ń +s s +et h +ol d +Ġper form +Ġsever al +é ¤IJ +Ġe ach +è½ ¬ +c i +Ġt y +Ġp ub +æ´» åĬ¨ +oc us +çī Į +è¶ Ĭ +åĽ ¢ +è½ » +è¯Ń è¨Ģ +Ġare as +éĩ ĩ +f t +ri end +å· ² +å¸Ĥ åľº +it ion +i ents +管 çIJĨ +è® ¸ +人 ç±» +身 ä½ĵ +iqu e +Ġpart ic +ç» Ń +age ment +v es +ç¬ ¦ +l ine +çº ¢ +åIJ ¸ +Ġpat ter +00 0 +社 ä¼ļ +åĨħ 容 +Ġorgan iz +ou gh +Ġ ve +åŃ© åŃIJ +æĸ ½ +æ¤ į +åĩ ł +ä½Ĩ æĺ¯ +Ġa ff +Ġn um +le ment +èī º +è ij +Ġc ar +ag es +ab or +æĺ¯ä¸Ģ ç§į +Ġin st +è Ľ +ä¹ĭ ä¸Ģ +è· ¯ +åį ³ +Ġm ain +éļ ı +H ow +å¿ ħ +ç¨ĭ åºı +éŁ³ ä¹IJ +re d +æ² ¹ +Ġoff er +et s +ç ¢ +Ġd uring +çļĦ 人 +æĽ´ å¤ļ +Ġd i +代 çłģ +èİ · +åħ ĭ +Ġgu id +主 è¦ģ +Ġf am +æİ § +éĢļ 常 +ĠA d +å¤Ħ çIJĨ +ur n +ow er +åij ½ +æı ı +Ġsk ills +Ġto ol +w are +æĸĩ æľ¬ +Ġpatter ns +缮 æłĩ +ac y +æī ĵ +åŁİ å¸Ĥ +Ġe very +r ies +è¯ » +éģ ¿ +çĻ ½ +éĢĤ åIJĪ +Ġpat ient +çľ Ł +ot h +å¥ ¹ +åĶ ® +ä¸Ģ ç§į +Ġm ade +ä½ İ +is e +Ġre m +æ¶ Ī +åIJ « +a ir +Ġgen er +o 
y +ç² ¾ +æĥħ åĨµ +ight s +Ġexp l +è§ ģ +Ġpre dict +ç± ³ +æĽ´ 好 +ä¿ ® +Ġcl imate +Ġf ocus +Ġg row +客 æĪ· +ä¸į æĸŃ +it or +ĠE n +çº ¦ +æĺ¯ åIJ¦ +ä» ħ +æĪij们 çļĦ +æľ Ľ +o p +Ġm aking +y th +cc ess +Ġo wn +gg est +Ġt as +ut ure +Ġmod el +p ut +Ġrese arch +ere st +éļ ¾ +Ġ [ +i el +ation al +Ġcommun ic +ç¥ ŀ +ç© ¶ +Ġre st +æĪIJ 为 +k ing +p r +åĮ » +c ur +èĤ ² +Ġ ' +è¿Ļ ç§į +ç¯ ĩ +Ġc he +ow n +éĻ ħ +Ġf in +åζ ä½ľ +Ġsu ggest +å¢ŀ åĬł +Ġmed ia +rib ut +çļĦæĥ ħ +åĬł åħ¥ +Ġc le +åij ¨ +ç« ł +Ġth ink +Ġloc al +pport un +ĠY ou +Ġpl an +Ġev en +éĽ Ĩ +å· § +a x +Ġchalleng es +Ġpro f +ĠC an +Ġconc er +Ġf uture +åĬ ¿ +Ġre f +èģ Ķ +Ġs elf +æĪĸ èĢħ +b le +åĽ ´ +è¿IJ åĬ¨ +Ġin f +éĩ Ĭ +Ġsustain able +Ġte xt +Ġg ra +äº Į +åĵģ çīĮ +ä¸įåIJĮ çļĦ +l ed +çĭ ¬ +Ġo pportun +Ġcont in +y m +Ġg et +å¯ Ĩ +éĻ ¤ +æ ħ +éģ¿ åħį +Ġ + +è§ ī +Ġre t +å¸ ĥ +Ġint erest +Ġsoc iety +ç»ĵ æŀľ +åIJ ¬ +é¦ĸ åħĪ +Ġb re +Ġ2 0 +ĠHow ever +è® ° +on s +è¿ ij +å¼Ģ å§ĭ +Ġbu ild +Ġbe h +' m +v ers +Ġg ood +çIJĨ è§£ +res ent +ç¦ » +åĬŁ èĥ½ +Ġeff ort +l abor +é» ij +Ġbet ter +Ġre ad +å¾ ĭ +èĽ ĭ +he d +ä¹ ° +导 èĩ´ +Ġimp lement +ç ¿ +äº « +å¤ ´ +en se +Ġl ong +ot her +é¥ ® +åŃĺ åľ¨ +çļĦæ Ħ +ä¸Ģ 份 +yth on +n ing +åĩı å°ij +åĢ Ļ +ä¸ ĵ +åIJĦ ç§į +è ħ +å° ½ +åį ĩ +æĬ ¥ +Ġpub lic +Ġl ar +ä½ł çļĦ +a ut +é¢Ĩ åŁŁ +æ ļ +ol low +èģ Į +Ġch ang +Ġb est +h ip +åĨ į +ak es +Ġch at +it ed +Ġp ower +ä¿Ŀ æĬ¤ +ä¹ ¦ +计 åĪĴ +éĩįè¦ģ çļĦ +åıĺ åĮĸ +il ities +Ġcons ider +æĪij们 åı¯ä»¥ +éĤ£ ä¹Ī +Ġ ide +æ¼ Ķ +ag ing +Ġb ased +å® Ŀ +Ġr ange +Ġres ult +Ġm em +çħ § +Ġle vel +c ou +Ġb r +T h +ä¼ ģ +建 ç«ĭ +Ġun ique +è® Ń +Ġm ark +许 å¤ļ +è¡Į 为 +Ķ ç©¶ +çļĦæ Ĭ +Ġs et +éª ¤ +t s +Ġh ist +Ġa round +Ġre v +åħ¶ ä¸Ń +ï¼ ģ +æıı è¿° +æľĢ åIJİ +Ġs im +n ect +åĽŀ çŃĶ +éĺ ² +èī ¯ +åΰ äºĨ +ä¸ ĸçķ +æĸ¹ æ¡Ī +æĿIJ æĸĻ +ä¸ĸçķ Į +æĽ´å¥½ åľ° +两 个 +Ġem ploy +Ġtr y +æ ĵ +Ġb ack +åĪ ĩ +Ġsu ccess +Ġdecis ions +Ġth ose +å¯ Į +Ġf act +æİ ¢ +è¶ £ +Ġpract ices +åIJ Ĺ +æī į +çİ © +pt ion +æĸĩ 竳 +Ġfe at +Ġpre vent +Ġwrit ing +çļĦæ Ģ +Ġn o +ä» ĭ +éĹ ¨ +Ġd el +æ Ĵ +Ġopt im +in ation +Ġ Ċ 
+us ion +Ġacc ount +l ing +Ġd ivers +. " +at h +èĭ ± +ä¼ģ ä¸ļ +Ġg rou +åľ° çIJĥ +å¤ ± +Ġpersonal ized +ĠH e +表 è¾¾ +cur ity +Ġf ollow +产 çĶŁ +Ġe ar +åİ ĭ +ver n +Ġiss ues +åĿ ĩ +é ² +Ġd r +iv ing +Ġtrain ing +Ġris k +åĩ ½ +åı ² +æ ij +çļĦæĹ ¶ +og n +Ġrequ ire +Ġenvironment al +b ack +éĶ ® +çĸ Ĺ +Ġinter act +åĽ¢ éĺŁ +æ¯ı 个 +çĦ¶ åIJİ +Ġd ist +ç͍ äºİ +认 为 +åĩ½ æķ° +Ġs ent +Ċ ĠĠĠĠĠĠĠĠ +Ġredu cing +å¹ ² +Ġre p +Ġc aus +Ġmus ic +ç ª +Ġmon itor +Ġfor m +é¢ ľ +çĹ ħ +é¦ Ļ +Ġof ten +åı¯èĥ½ ä¼ļ +åijĺ å·¥ +Ġha nd +æĬ ķ +Ġneed s +æŃ¤ å¤ĸ +åı ĭ +iv ity +Ġactiv ities +åĸľ 欢 +Ġp ur +i an +s elf +åĬ¨ çī© +com es +å © +Ġpr iv +a z +Ġrel ations +Ġmach ine +çļĦæ ° +ä»· æł¼ +ä»· å̼ +ç´ ¢ +Ġfe ed +ä¸Ģ ä¸ĭ +Ġte am +Ġindust ry +è´ ¢ +ĠP ro +Ġw ant +ç§ ° +Ġcl ass +Ġlo ve +åħ³ äºİ +è¾ĵ åħ¥ +Ġtrans port +Ġcomple x +Ġy ear +éĶĢ åĶ® +å¯ » +i ence +ist s +æĶ¯ æĮģ +Ġm ind +Ġf un +Ġch ar +æĮ ī +Ġconcer ns +con om +ç®Ģ åįķ +以ä¸ĭ æĺ¯ +Ġst art +å¹¶ ä¸Ķ +av i +ä¸Ń åĽ½ +åħĥ ç´ł +Ġcon f +Ġpos itive +Ġc ur +Ġc ount +er y +å ¡ +å® ¤ +Ġco st +Ġe qu +Ġpol ic +ast e +a w +éħ Ĵ +cou ra +iv en +pl ace +ch ie +çļĦæķ ° +åĽł ç´ł +Ġf l +is m +Ġmed ical +Ġhum ans +Ġaut om +ertain ly +Ġ 0 +Ġoff ers +Ġdet ect +Ġ 6 +é£İ æł¼ +Ġsh ow +çģ « +Ġan im +é¢ľ èī² +le ase +a ve +åĵ ª +ĠThe re +以 ä¸Ĭ +æľª æĿ¥ +X X +çī ĩ +u ch +Ġtas ks +åħ· ä½ĵ +æ¤į çī© +Ġm in +èīº æľ¯ +ic ult +Ġexperi ences +æİ§ åζ +b e +Ġpat ients +å ² +ĠW e +Ġrec ogn +çĥ ¤ +Ġsm all +åĿ Ĺ +å Ħ +太 éĺ³ +ct ion +Ġ ent +æį ¢ +Ġbe fore +Ġbe come +å·² ç»ı +表 çݰ +Ġexp lo +Ġa chie +ä»» åĬ¡ +大 çļĦ +Ġd ay +Ġf ound +å± ± +on d +Ġtreat ment +pe nd +he n +Ġcon dit +ç¡® å®ļ +Ġbusiness es +ĠW h +æīĢ æľī +Ġdevelop ed +ç» Ī +æŃ¥ 骤 +Ġdiff icult +åı · +ĠR e +éĶ Ļ +Ġch o +Ġqu est +Ġtrans pare +Ġpro ject +Ġcommun ity +o v +å¸ Ī +å¼ ł +åĪĨ ç±» +人 çļĦ +s is +çĽ Ĭ +o id +ĠA n +w ays +Ġe as +Ġaff ect +Ġother s +Ġreg ul +æĢ§ åĴĮ +åĸ Ħ +ag n +ä½ľ 为 +åı¯ä»¥ 帮åĬ© +åĦ ¿ +Ġorganiz ations +é¸ ¡ +åħ ´ +Ġf riend +Ġ $ +Ġdet ail +Ġtra ditional +Ġdesign ed +è´Ń ä¹° +ä½ĵ éªĮ +ç» į +er m +Ġcon nect 
+è¿Ļ æł· +Ġrecommend ations +Ġb oth +Ł éĢļ +æ¯ į +Ġs it +ä½ľ ç͍ +ä»ĭ ç»į +Ġst e +ĠS ure +åı ° +æĤ¨ çļĦ +Ġs he +Ġman agement +j oy +è´ Ł +Ġpromot e +Ġvari ous +( " +p or +Ġs ens +Ġess ential +get her +ular ly +äº ī +ir st +Ġo p +Ġspec ies +çݰ åľ¨ +ch o +Ġbeh avi +çŃ ij +å¥ ³ +Ġqu ality +Ġex t +è ¥ +å®Į æĪIJ +æĢ» ä¹ĭ +éĥ¨ åĪĨ +ä»İ èĢĮ +åĽ ¾ +Ġty p +Ġstr ate +è¥ ¿ +Ġhe re +ar s +å¸ Į +çļĦæ Ŀ +å° Ŀ +e e +i er +Ġe c +ical ly +er ing +å¿ µ +ĠD e +Ġne g +建 çŃij +Ġserv ices +Ġab le +im es +Ġopt ions +缸 åħ³ +Ġsu b +Ġdecis ion +ĠC ertainly +Ġ åľ¨ +æ ¢ +Ġserv ice +) : +带 æĿ¥ +Ġch ild +è§£ éĩĬ +ir t +ç Ĩ +ä¸į ä»ħ +æĿ ¾ +积 æŀģ +r on +åı ¤ +çł Ķç©¶ +ç² ī +h or +Ġprof ess +çļĦ éĹ®é¢ĺ +Ġopportun ities +åİĨ åı² +Ġde f +ĠA m +Ġg r +a ur +å± Ĥ +çŃ ĸ +Ġpop ular +æ´ ģ +åıij çݰ +Ġpo em +èµ Ľ +Ġo b +Ġd on +Ġs ound +Ġtransport ation +i ous +åı ¦ +Ġro le +Ġf iel +ç§ij åѦ +èĢ ģ +re en +æľī æķĪ +Ġc or +Ġfeed back +Ġtechnolo gies +交 éĢļ +Ġad apt +' re +erv ation +Ġcommun ities +çݰ 代 +Ġlo ok +Ġf ac +ç͵ å½± +Ġcol lect +å¾Ĺ åΰ +h ips +Ġav ail +ere n +ä¸Ģ èµ· +çī Ľ +Ġpos s +Ġwe ather +Ġeffort s +¿ Ģ +æĹ ħ +o h +Ġcol labor +æĭ ¥ +æĪIJ åĬŁ +èİ· å¾Ĺ +å± ħ +Ġt re +Ġs ources +Ġstud y +Ġprogra ms +éĻ IJ +Ġt ips +Ġmark et +al ly +å® ³ +w ards +æ£ Ģ +ä¸Ģ ç¯ĩ +ri or +Ġto p +Ġe nd +å ĭ +Ġlar ge +ici ency +Ġde c +å®ļ çļĦ +ic ient +è¿ĩç¨ĭ ä¸Ń +lic ations +ç¼ º +Ġto ur +Ġto gether +人 å·¥ +Ġtool s +æĸ ¯ +æ° ij +æĬ Ĭ +ä¹ĭéĹ´ çļĦ +çī¹ çĤ¹ +Ġbe l +ditional ly +åĪ© ç͍ +è¾ ¹ +éĻ į +ĠI f +é¢ Ŀ +åį ı +å¾ Ģ +l ish +è¯ ī +in s +å¥ ¶ +Ġe conom +Ġinv est +ĠD o +t ain +åĩº çݰ +çļĦ å½±åĵį +ater ial +Ġs ure +Ġp ass +çĶ » +è´ £ +ç»ĵ æŀĦ +æķ ħ +æĥħ æĦŁ +æ ¿Ģ +ell ig +ä¼ Ĺ +æ¯Ķ è¾ĥ +ter n +Ġout comes +u p +Ġbe aut +re ad +çĶŁ æĪIJ +æķ° åŃĹ +Ġde m +i res +åı¯ä»¥ éĢļè¿ĩ +æĸ° çļĦ +Ġde ep +å ¨ +çĭ Ĺ +åħ³ 注 +çĶŁ åij½ +ä¼ł 绣 +Ġst ay +æŃ Į +åħ³ éĶ® +Ġpl ace +主 é¢ĺ +å¾Ī å¤ļ +èĪ Ĵ +Ġprofess ional +y le +æĽ ² +1 9 +Ġess ay +Ġg ive +ç³ ĸ +Ġon ly +æŁ IJ +Ġph ys +对 è¯Ŀ +Ġcont ro +Ġam ount +ce pt +iz ation +ç¼ĸ åĨĻ +åıĹ åΰ +Ġal ways +æ¯Ķ 
å¦Ĥ +Ġpriv acy +a u +____ ____ +Ġrespons ible +( ) +çŃī çŃī +Ġm aterial +Ġon line +é ¼ +æĶ ¿ +åĽ Ľ +Ġen joy +åľ Ł +Ġsaf ety +Ġt w +Ġcommunic ation +ä¸ ½ +æĺ ¾ +olut ion +er g +į ä½ľ +Ġus er +Ġemot ional +t ime +é ¾ +Ġse curity +Ġs ense +el ines +åĬ ± +çī© è´¨ +u ra +Ġsh are +Ġanalyz ing +it al +é ± +irt ual +Ġvis it +b ers +Ġc our +Ġpro ble +设 å¤ĩ +at ch +l and +é± ¼ +æĪij们 éľĢè¦ģ +ç¨ ³ +ib ility +Ġeff iciency +å£ ° +è Ĵ +æľº åύ +Ġcle ar +åζ å®ļ +iz ing +Ġcondit ions +l usion +Ġlo w +Ġl im +her s +Ġris ks +ç¿ » +Ġle t +åĴ ĸ +å¿ĥ çIJĨ +è¿ ľ +pr int +Ġchang es +Ġme as +Ġimpro ving +Ġc rit +5 0 +å¸Į æľĽ +Ġa ud +åį Ĺ +æĹł æ³ķ +Ġneg ative +项 缮 +u nd +at s +Ġcompan ies +æī¾ åΰ +Ġcont ribut +æŃ£ ç¡® +é» Ħ +å± ŀ +Ġunderstand ing +Ġm ult +Ġc lo +å¾ ģ +Ġp rior +r im +人工 æĻºèĥ½ +Ġvari ety +Ġt aking +å Ĥ +as ter +od y +Ġ { +çļĦ éĩįè¦ģ +Ġf ore +èµĦ æºIJ +è¦ģ æ±Ĥ +Ġfeat ures +èį ī +m e +èĮ ĥ +Ġo per +çº § +é² ľ +æĬĢ å·§ +ij æĪĺ +ç±» åŀĭ +æĿ ¿ +è½ ¯ +e w +Ġrest aur +Ġwith out +ruct ure +çļĦ æĺ¯ +ç ı +Ġl ist +ur ate +Ġbo ok +äº ² +åº Ĺ +ä¹Ł æĺ¯ +ä»» ä½ķ +Ġc am +ĠB e +Ġgo vern +Ġbehavi or +è®Ń ç»ĥ +Ġfam ily +æĿ Ĥ +Ġc ity +Ġappro ach +Ġacc urate +Ġs om +Ġe l +èĪ ŀ +è ŀ +åŁº æľ¬ +Ġdis e +Ġen coura +ĠW hat +å ĥ +è¯ ¦ +¦ Ĥ +å·¥ åħ· +åķ ¡ +Ġst ill +cho ol +æĦŁ åΰ +çĶŁ çī© +åĴĸ åķ¡ +åĩĨ å¤ĩ +Ġw aste +Ġev ents +æķĻ èĤ² +Ġ 8 +Ġm ust +i ed +as ing +å½¢ æĪIJ +Ġproduct s +åħ ¸ +è® ² +f ter +å· ® +l ess +Ġc ro +Ġfin an +åıį åºĶ +åĪĽ éĢł +Ġguid elines +åĪ ¤ +ä½ľ åĵģ +表 示 +å¼ Ĥ +Ġknow n +Ġt est +è¯ ¯ +o pe +Ġus ers +A I +å¾ · +ne w +è¿ ½ +iqu es +模 åŀĭ +åĬĽ åĴĮ +Ġhist ory +ĠA l +æĬķ èµĦ +å°Ŀ è¯ķ +an k +Ġh ome +éĴ Ł +ä¸ ° +èĪĴ éĢĤ +Ġincre ase +Ġh ab +åĪ » +è¾ĵ åĩº +Ġlead ing +Ġ 7 +é£İ éĻ© +Ġperform ance +Ġha pp +åŃ £ +Ġst and +t y +ç¦ ı +Ġcustom ers +åį İ +Ġbel ie +Ġcompan y +å½ ķ +é£Ł çī© +ĠU n +Ġsu mm +re nt +ĠC on +éĢĤ éĩı +an ced +Ġ i +Ġl ight +Ġanaly sis +å° Ĭ +ĠU se +ou se +t ed +Ġchar act +Ġ # +t o +ç» ľ +ä¸į æĺ¯ +Ġdevelop ing +åŁ ¹ +Ġstrate gies +Ġm ight +çŁ Ń +çļĦæ İ +Ġf irst +èĥ Į +çĮ « 
+Ġinclud es +åĽ Ń +Ġdi agn +Ġgrow th +ä¸ĵ ä¸ļ +Ġdo es +1 2 +ç» ¿ +Ġke ep +详 ç»Ĩ +åĥ ı +åıij çĶŁ +f act +åı¯ä»¥ åľ¨ +ç« Ļ +æĭ ī +æµ İ +Ġchat bots +Ġbre ak +è¡ ¡ +çŁ ³ +æĮģ ç»Ń +l ife +Ġ1 0 +æ´ Ĺ +ĠAd ditionally +å£ « +em ber +Ġgo als +å¾ ® +Ġv iew + · +o ve +åŁº ç¡ +Ġoptim ize +Ġt em +Ġd own +åŁºç¡ Ģ +è¶ ħ +er cis +Ġl ess +e es +æĿ ĥ +Ġke y +Ġwor ks +è® ¨ +åı¥ åŃIJ +Ġro bot +us s +åħ¨ çIJĥ +ç»ı æµİ +æīį èĥ½ +eg r +ä»ĸ们 çļĦ +äº Ķ +èµ· æĿ¥ +ç ĵ +Ġfact ors +Ġcult ural +æľ ¨ +Ġwork ing +ä¼ ¼ +èIJ ½ +éĢŁ 度 +ä½ ı +Ġeffect s +å© ļ +b r +åİ ħ +ra in +" ) +åѦ çĶŁ +" , +Ġp ar +at form +Ġens uring +çͱ äºİ +Ġm uch +Ġwor ds +Ġm ar +ç»ı éªĮ +为 äºĨ +åIJĪ ä½ľ +v en +Ġ / +Ġfinan cial +wor k +or ies +æ² » +Ġtechn iques +æĭ¥ æľī +ra p +å° Ķ +Ġ est +Ġavail able +Ġl it +æ ¹ +Ġeff icient +el s +o ver +Ġl and +Ġare a +Ġint ellig +Ġpre f +at ure +çŁ¥ è¯Ĩ +æĵ įä½ľ +å¾ ħ +ig ate +çļĦæ Ķ +Ġme an +b o +Ġcontro l +éĩĩ ç͍ +ric ult +Ġprogra mm +Ġto wards +th ing +ä¸į è¦ģ +Ġth ough +å½ © +Ġc ertain +Ġw ild +ä» Ĭ +Ġcons ervation +çŁ¥ éģĵ +Ġreal ly +çļĦ åľ° +i o +é¥ ° +Ġf ul +çݯ ä¿Ŀ +Ġexplo re +çļĦæ ¸ +Ġdivers e +åĬł 强 +çļ ® +Ġemot ions +Ġav oid +' ll +çļĦæ ī +åį ¡ +Ġpl atform +an ces +Ġsit u +ä» ĺ +ä½į ç½® +or ing +çĽ IJ +ä¸ ĩ +Ġde v +n ov +as h +Ġtw o +å® ł +b on +èµ ° +åĪĹ è¡¨ +Ġc y +èį IJ +ĠS ome +Ġexpl ain +Ġa ware +社 交 +d ay +åı Į +æ² ŁéĢļ +æ° § +å¼Ģ åıij +åħ¬åı¸ çļĦ +Ġa ir +åĩ » +ar ing +éĥ½ æĺ¯ +Ġlevel s +od s +Ġste ps +Ġc ap +æ´ ŀ +é© ¬ +Ġret urn +Ġm et +çĶŁ æĢģ +丰 å¯Į +æŁ ĵ +æīĢ ä»¥ +é¡ » +Ġ er +Ġf ra +3 0 +è ĵ +âĢ Ķ +Ġ å½ĵ +a h +ä¿ ĥ +Ġlike ly +ĠĠĠĠĠĠĠĠ ĠĠĠĠĠĠĠĠ +åĪ Ŀ +Ġcreat ing +Ġf arm +Ġb al +Ġl ives +å®ĥ çļĦ +Ġab ility +ä¸Ĭ çļĦ +Ġsent ence +åĤ ¨ +Ġr out +Ġprovid es +Ġag ain +å®ł çī© +éĢ IJ +Ġyear s +èŀ į +Ġphys ical +P ython +ĠE x +it ing +è°ĥ æķ´ +ç½ij 绾 +æħ ¢ +空 éĹ´ +åĽ ° +è± Ĩ +æĽ´å¤ļ çļĦ +ĠA r +Ġmain tain +å®ŀ éĻħ +Ġtra vel +Ġs at +p ro +ç͵ åŃIJ +æ± ½ +e x +åģ ĩ +æIJ Ń +éļı çĿĢ +è¿ĺ æľī +ç¤ ¼ +al e +Ġcons um +Ċ Ġ +n cy +Ġquest ions +f ort +m aking +Ġdes c +1 5 +Ġinvol ves +Ġst 
ress +åŃŠ符 +he re +Ġimpact s +Ġex ercis +åĿ ļ +led ge +ç§ij æĬĢ +oc i +Ġeffective ly +æ¶Ī è´¹ +Ġconc lusion +éĺ ħ +Ġst re +iss ions +æ· » +I t +éĿ Ļ +Ġv irtual +è¡ £ +Ġachie ve +our ce +è¿ ŀ +ac ks +表 æł¼ +Ġimport ance +èĩª æĪij +The se +n um +çļĦæ ł +Ġrelations hips +Ġwork ers +g ical +or por +ers on +åij ¢ +nd s +æİ¨ èįIJ +oh n +å¿ħ é¡» +容 æĺĵ +ĠG o +Ġt ell +ĠR es +on om +Ġbe c +æ³ Ľ +p os +Ġmo ve +Ġst ory +æŃ ¢ +Ġprior it +Ġindust ries +è ľ +Ġposs ible +ĠM an +Ġexp ress +ab ilities +Ġint egr +代 表 +Ġrespon d +åĪĨ éĴŁ +æľº ä¼ļ +Ġth ings +交 æµģ +Ġm eth +ur ther +Ġw ide +èij Ĺ +æĪij çļĦ +ĸçķ ¥ +id es +eth ing +ĠWh ile +p an +çŃ ĸçķ¥ +Ġc ent +Ġp lease +olo gy +ura cy +å¾ ª +w ard +n ce +Ġthe n +çª ģ +å¥ ĩ +Ġb lo +a i +æŀ Ĺ +ç®Ĺ æ³ķ +ç» ¼ +Ġpr int +ac es +l u +ª æĸ½ +p re +çļĦæĦ ı +Ġs ol +Ġover all +h old +Ġ es +çļĦ ä¸Ģ +éģ ĩ +Ġpop ul +å°ı 说 +æ³ ¢ +åį ģ +ä¹Ł åı¯ä»¥ +é£Ł åĵģ +Ġcont ent +å° Ħ +Ġrequ ires +æ£Ģ æŁ¥ +ĊĠĠĠĠĠĠĠĠ ĠĠĠ +Ġgrou ps +Ġf air +Ġb l +å®ŀ éªĮ +æĮī çħ§ +os p +st r +ä¸į èĥ½ +Ġh arm +Ġpro du +çļĦæĬ Ģ +ç ĩ +t le +Ġanim als +è§Ĵ èī² +le v +æ¸ IJ +å¤į æĿĤ +Ġde pend +æĮ ijæĪĺ +åĮħ åIJ« +Ġhelp s +Ġop en +Ġn et +ĠĠĠĠ Ġ +Ġstr ong +Ġj our +广 æ³Ľ +æķ´ 个 +Ġe lect +Ġrespon se +åįķ è¯į +æľ ĭ +Ġ < +åĮĸ åѦ +éĴ Ī +Ġqu ick +ual ly +Ġsom ething +Ġtra ck +度 åĴĮ +eren ces +æł ij +Ġacc uracy +Ġex c +é£ ŀ +Ġfiel d +寻 æī¾ +éħ ¸ +Ġh ope +ç ij +Ġin nov +ç» ª +al k +Ġtyp es +Ġd id +åĬ ª +Ġc all +è¯ Ĺ +Ġear ly +ĠO ne +a pp +Ġcomm on +æľĢ ç»Ī +Ġche ck +Ġs ym +çĤ Ĵ +æĬĢ èĥ½ +Ġen h +Ġag ricult +Ġim m +ç» ĩ +满 è¶³ +Ġs chool +b al +Ġfollow ing +b ased +Ġwe bs +Ġcult ure +ĠC om +w ay +ä¸Ģ å®ļ +åķĨ åĵģ +ud e +çļĦ åıijå±ķ +çĶŁ 产 +os ystem +Ġpl ant +åı ¶ +åIJ ĥ +ä»ĸ çļĦ +d er +è¯ ¢ +å®¶ åħ· +Ġf ree +ç§ » +æİ Į +Ġb ody +Ġp resent +Ġpartic ularly +Ġchild ren +Ġstud ent +) . 
+çī¹ å¾ģ +è Ķ +éĺħ 读 +æķĪ çİĩ +Ġprogra m +éħ ± +åıĺ å¾Ĺ +i x +Ġcom e +çļĦæ ² +ĠT e +ĠT o +åħ± åIJĮ +Ġemploy ees +说 æĺİ +Ġhe art +Ġm ot +æľĭ åıĭ +er ic +è¯ ij +Ġcur rent +æĪIJ æľ¬ +Ġto o +çİ© å®¶ +åĪĽ æĸ° +Ġec osystem +常 è§ģ +ä¸Ģ æŃ¥ +Ġp res +Ġmult i +åijĬ è¯ī +ä¸ ¥ +Ġm it +Ġact ion +çĨ Ł +Ġhab it +åı£ æĦŁ +ç® ± +Ġus es +å¢ŀ 强 +ç»Ļ åĩº +Ġ 9 +Ġde p +Ġeconom ic +æĢ§ çļĦ +1 8 +åĨ ° +Ġhelp ed +åIJ¸ å¼ķ +çİ ĭ +Ġdiagn os +å ł +èģĶ ç³» +ç¾ ¤ +ç»ĥ ä¹ł +æĪIJ éķ¿ +Ġpo int +å®ļ æľŁ +åij ¼ +èį ¯ +æĿ ¯ +æ¤ Ĵ +æķĪ æŀľ +Ġspec ial +æ· · +åĩł 个 +aus e +é Ĩ +æ¯Ķ èµĽ +è· Ŀ +W hat +Ġt imes +ic les +Ġ * +ç´ § +å¦Ĥæŀľ ä½ł +çĭ¬ çī¹ +çģ µ +ç¨ İ +Ġcar bon +Ġbi as +åĬ© äºİ +Ġcon st +èĩª çͱ +æĿ¥ 说 +å°± æĺ¯ +åį ° +Ġme et +è§Ħ åĪĴ +çļĦç ¾ +èIJ¥ åħ» +at ors +稳 å®ļ +od e +çħ ® +Ġass oci +å¿ Ĺ +è¡Į æĺŁ +æĿ İ +Ġrev iew +åĩ Ģ +ĠR o +Ġknow ledge +以 便 +æµĭ è¯ķ +åIJĪ éĢĤ +s c +å½¢ å¼ı +Ġfriend s +Ġnat ure +Ġcrit ical +æ´ ĭ +Ġa fter +er ve +Ġre ce +çļĦæ Ń +æ±½ 车 +çķ Į +Ġlo ss +Ġapp lications +å¤ļ ç§į +éĶ ħ +ä¸ ² +Ġins p +-- - +ĠS h +Ġv ol +l ut +o ks +se qu +Ġb ir +åIJĪ çIJĨ +Ġne cess +æĪij æĥ³ +çŃī æĸ¹éĿ¢ +é¼ ĵ +Ġso ft +Ġl ive +å°ı æĺİ +ĠI nd +Ġbr ing +æĺ¯ æĮĩ +Ġso il +il ar +ä¸ ľ +æĿ¡ ä»¶ +Ġt ri +äº ® +Ġm om +æı ¡ +ä¼ ° +ŀ äºī +çĽ ij +èĤ ¤ +è´¢ åĬ¡ +æ·» åĬł +饮 é£Ł +Ġallow ing +åº ķ +Ġr ight +Ġexp ert +Ġsu pp +Ġin it +çļĦæ µ +ar get +Ġexp ect +Ġ1 9 +Ġmeas ures +olut ions +j ust +ar c +å° ļ +Ġpract ice +æľī åĬ©äºİ +大 éĩı +' , +im ent +Ġcontin ue +Ġdisc uss +1 00 +éļ ľ +çļĦæĦ Ł +Ġref lect +it ation +åį « +äºĨ ä¸Ģ +ne y +ĠL e +is ed +è¶ ĭ +äºĨ ä¸Ģ个 +Ġincre asing +çļĦæ Į +Ġst ru +æĢ» ç»ĵ +e ly +å® ĩ +Ġaut hor +表 éĿ¢ +Ġ x +æķħ äºĭ +em ic +Ġrep resent +g er +Ġincre ased +on es +ain s +Ġtrain ed +Ġf ish +Ġst ate +åĨ · +çĶŁ éķ¿ +Ġre new +ord ing +åĮ Ĺ +æİ ªæĸ½ +å¹³ è¡¡ +Ġsuccess ful +ä¸ĭ éĿ¢ +Ġactiv ity +èĮ ¶ +éĢĤ åºĶ +èĦ ij +æİ¢ ç´¢ +ff ic +ç»Ħ æĪIJ +at ives +äº ļ +Ġsc en +æ² Ļ +g ress +使 å¾Ĺ +æī ¿ +Ġdisc rim +Ġassist ants +Ġex ist +çķ Ļ +Ġsp ace +æľĢ è¿ij +Ġide as +éĩĩ åıĸ +l ight +注 éĩį +çļĦæĹ¶ éĹ´ +è¿ İ 
+Ġcom b +éĢĤ å½ĵ +Ġyour self +rit e +as on +åĮ Ģ +åı¯ä»¥ 使ç͍ +åħħ 满 +Ġval ues +æ ½ +Ġbi ases +ä¿ĥ è¿Ľ +åľº æĻ¯ +ro ss +åį³ åı¯ +Ġc ru +Ġnum ber +Ġty pe +r ast +åĩĨ ç¡® +Th is +Ġp ast +çģ ¯ +å®ļ ä¹ī +Ġs olutions +Ġt er +ä¿Ŀ è¯ģ +èĶ ¬ +å¹ ¸ +åī § +åħ´ è¶£ +å ª +ent ion +av or +Ġsc ient +åĬª åĬĽ +Ġprovid ers +Ġpolic ies +al u +ĠI m +Ġallow s +Ġintellig ence +çļĦ æĸ¹æ³ķ +è¿Ļ æĺ¯ +Ġ ` +Ġem issions +Ġ å°Ĩ +Ġmean ing +Ġst yle +åİŁ åĽł +Ġstru gg +çļĦç¾ İ +if ul +dit ion +éĥ½ æľī +空 æ°Ķ +å®ĥ们 çļĦ +ä¼ĺ åĮĸ +Ġinf lu +åŁº äºİ +Ġdetail s +Ġtranspare ncy +Ġm ess +ĠC l +Ġg ame +p ri +è¶ĭ åĬ¿ +å½ Ĵ +ç¿» è¯ij +æķ £ +B y +é Ń +ĠAm eric +Ġproduct ion +Ġinc orpor +æĻ ļ +Ġinvol ve +Ġh ot +æĻ ® +b y +Ġf low +Ġem erg +åº § +Ġide a +åİĭ åĬĽ +éĿ Ĵ +om s +èģĮ ä¸ļ +Ġre port +Ġp ap +Ġthe rap +Ġs al +åıĤ ä¸İ +æĸĩ åѦ +æIJŃ éħį +o ot +) , +Ġc r +Ġprocess es +g in +å¹³ åı° +å¯ Ł +Ġpromot ing +æļ ĸ +ake hold +ç» § +iv er +æ ¦Ĥ +Ġmodel s +Ġd ra +è ĸ +Ġgrou p +è¶³ å¤Ł +Ġg reen +Ġhealth y +Ġcom fort +Ġad ditional +ä¸Ģ 次 +é¤IJ åİħ +Ġmaterial s +Ġman age +çļĦæ ¯ +ä¼ ¤ +åıĬ æĹ¶ +Ġg lo +Ġst at +å¿« éĢŁ +Ġmonitor ing +ail y +ra nd +o ice +res h +ç»Ħ ç»ĩ +Ġund er +Ġnecess ary +Ġhelp ful +ĠC ol +é»ij æ´ŀ +åģļ åĩº +Ġcour se +Ġm at +Ġle g +Ġf ace +ä» ¤ +èī¯ å¥½çļĦ +oc k +åĮ» çĸĹ +çĽ ĸ +id ence +Ġassoci ated +Ġpro gress +åľ Ĩ +Ġevery one +ç¼ ĵ +ĠEn g +w ord +èĵ Ŀ +天 æ°Ķ +Ġact ions +em s +ĠP l +å® Ļ +us h +é¡ ¾ +Ġcost s +at or +ç© ¿ +Ġamount s +èͬ èıľ +. . 
+Ġman ner +Ġcon sequ +æ°Ķ åĢĻ +Ġins ights +be ing +at ory +en er +le x +Ġme ans +Ġcollabor ation +Ġpers pect +or m +pri ate +å°Ĭ éĩį +Ġt arget +è®° å½ķ +åĢ Ĵ +Ġrenew able +æĦ ¿ +èĥ½ æºIJ +Ġin put +å®ĩ å®Ļ +a pe +Ġad just +er ies +Ġd ire +ä¾ Ŀ +ust r +f ect +Ġbeaut iful +Ġd ue +re ci +çĮ ® +èĥĮ æĻ¯ +èĤ ¡ +Ġd am +i k +Ġadv anced +缸 对 +åIJį ç§° +Ġsh ort +Ġob ject +è¿Ļ éĩĮ +éĢł æĪIJ +èIJ¥ éĶĢ +çļĦæĥħ æĦŁ +ç¥ ¨ +Ġcount ries +in ing +ist ic +Ġpl ans +è´£ ä»» +Ġst akehold +t he +Ġass ess +æĢĿ èĢĥ +e ch +æĪIJ åijĺ +2 1 +Ġd aily +Ġcomp ut +çļĦæĥħ åĨµ +æıIJ åĩº +Ġ âĢľ +åª Ĵ +ä¸Ń å¿ĥ +is hed +ĠS e +onom ous +er n +ç»´ æĬ¤ +am es +Ġpriorit ize +çº ¸ +èĤ ¥ +Ġtem per +æ¸ħ æ´ģ +us e +æ± ¡ +Ġmin im +æĺ¯ åľ¨ +大 å°ı +åĵª äºĽ +Ġapp reci +ren g +Ġregul ations +Ġ Z +éĶĻ è¯¯ +r ans +èĢĮ ä¸Ķ +èĪ ¬ +èij ± +è Ĩ +æ°´ å¹³ +è´Ń çī© +åŃĹ符 串 +对 æĸ¹ +Ġh im +Ġconsequ ences +å· ´ +é¼ĵ åĬ± +Ġf il +人 åijĺ +è·Ŀ 离 +ĠW hen +çļĦæ° ´ +çī© çIJĨ +åIJĮæĹ¶ ä¹Ł +åľ¨ è¿Ļ个 +åħ¶ 次 +, " +æ¶ ² +çĶ · +iv al +åı¯ä»¥ 让 +æĥ ¯ +Ġadv ance +Ġve h +å¦Ĥæŀľ æĤ¨ +Ġest ab +ri pt +ç« ¯ +ä¸į ä¼ļ +Ġtranspare nt +æķ° éĩı +çĽ ĺ +Ġspe ak +Ġp ark +Ġstakehold ers +é º +Ġev ent +çļĦæķ° æį® +èĩª åĬ¨ +ç»Ĩ èĬĤ +è¯Ħ ä¼° +æ¶ ¦ +Ġpref erences +Ġve get +æį Ł +e qu +Ġg l +Ġp ain +o gra +Ġtra ffic +Ġo ce +ä¹ ĺ +e xt +âĢĿ ï¼Į +Ġan other +å¤ļ å°ij +Ġagain st +ç»ı åİĨ +计ç®Ĺ æľº +èĢ IJ +软 ä»¶ +ĠP re +Ġpl ants +缸 äºĴ +é¢ ij +\ _ +Ġs ame +ru g +Ġval u +Ġo cc +çļĦç ¤ +Ġsustain ability +ĠS he +d e +ot e +Ġd ig +N A +Ġcru cial +æī § +å± Ģ +æĭ Ł +æĭ Į +Ġn on +Ġeng aging +Ġinter n +L P +温 度 +æł ¸ +æĬ¥ åijĬ +æĿ¥ è¶Ĭ +h ood +ä¸ī 个 +å¦Ĥ ä¸ĭ +çī© ä½ĵ +for ce +Ġneed ed +Ġim ages +Ġbuild ing +ici ous +Ġ æĪij +è¶Ĭ æĿ¥è¶Ĭ +æĶ¾ åħ¥ +g o +éĻį ä½İ +å½ĵ åľ° +æ¶Īè´¹ èĢħ +ç £ +ivers ity +é¢Ħ ç®Ĺ +ic le +æ·· åIJĪ +Ġpartic ip +Ġdis hes +Ġthrough out +Ġwith in +åı ³ +é«ĺ çļĦ +Ġph ot +Ġtr ust +æĦı è¯Ĩ +以 ç¡®ä¿Ŀ +çĬ¶ æĢģ +Ġautom ation +1 1 +Ġpo st +æīĭ æľº +wor ks +éĢ ı +åº ĵ +Ġw ind +Ġ= = +Ġprocess ing +èĮĥ åĽ´ +æĦı ä¹ī +追 æ±Ĥ +à © +å¾ Ħ +éĿ ł +ä¸ ĸ +èĻ ½ +ç« ŀäºī +Ġappro 
priate +æĽ´ 好çļĦ +Ġcharact er +c l +ç§ ĺ +it ude +Ġte ac +le ep +ĠDe velop +in ce +å· ¦ +g round +è¡Į ä¸ļ +éĴΠ坹 +å¿ħ è¦ģ +Ġdet erm +-------- -------- +Ġst reng +d o +Ġchalleng ing +or k +Ġan x +èī² çļĦ +Ġh ard +æĺİ ç¡® +åĪĨ 享 +æĶ¹ åıĺ +ä½ ³ +åıª æľī +å±ķ 示 +Ġcam p +çº ³ +a j +et ic +u ment +ä½ł åı¯ä»¥ +Ġpol lut +Ġh ig +pp ing +e ad +çĦ¶ èĢĮ +第 äºĮ +é¸ Ł +çī© åĵģ +ä¸ ¾ +Ġencoura ge +pe cial +Ġac ross +el ves +äºĭ ä»¶ +c le +æ © +åªĴ ä½ĵ +n ers +Ġc al +èϽ çĦ¶ +åĽ º +ä¹ł æĥ¯ +Ġsaf e +èĥ½ éĩı +ist ics +ä¹ĭ åīį +Ġiss ue +å¤ļ 个 +åĨ³ çŃĸ +è¾¾ åΰ +æĹ © +ä¸į åı¯ +ä¸Ģ 缴 +å· ¨ +æĦŁ è°¢ +ĠN ew +ä¸Ģ 段 +Ġmach ines +å°Ĩ åħ¶ +ç»§ ç»Ń +Ġwor d +çī¹ åĪ« +Ġagricult ure +æĢ İ +éĢIJ æ¸IJ +éĵ ¾ +è¯ ¾ +Ġk ind +å¢ Ļ +è°¢ è°¢ +Ġalgorith m +è£ħ 饰 +Ġal ong +Ġeas y +äº ij +è§£åĨ³ æĸ¹æ¡Ī +Ġaware ness +' ve +æĸ¹ åIJij +Ġne ver +Ġquick ly +Ġres pect +çļĦæ Ļ +Ġam ong +Ġaccount ability +Ġl aw +en ing +Ġdef in +Ġsur round +éĵ ģ +Ġpower ful +A n +Ġcaus e +æ ¥ +æİĮ æı¡ +è¿ĺ æĺ¯ +Ġcreat ive +è¡ Ģ +Ġloc ated +un ning +åľ° åĮº +éĿ¢ 积 +éĽ ¨ +Ġne ar +Ġinit i +ress ion +ä¸ĭ æĿ¥ +2 5 +é© ¶ +¾ çĹħ +ab les +æľī è¶£ +循 çݯ +çŃĶ æ¡Ī +çł ´ +ic ation +éĻ ¢ +æ²» çĸĹ +Ġad dition +äºĭ æĥħ +Ġbec ause +åı Ī +èĤ Į +çº ª +s ide +æĭ ħ +æ¹ ¿ +åį Ĭ +é¡ º +ĠA nd +Ġrestaur ant +Ġv ide +Ġproble m +az ing +Ġmem bers +Ġn ut +Ġc ou +æµ ª +Ġ è¿Ļ +Ġhelp ing +ĠI s +æıIJ åįĩ +ĠĠĠĠ ĠĠ +Ġsh o +Ġre lev +Ġar g +Ġbal ance +ill ed +æĺ¯ ä»Ģä¹Ī +åĬĽ éĩı +ire d +å¤ ľ +åı¯ æĮģç»Ń +Ġper fect +* * +ific ation +æ¶ ī +Ġwild life +an e +Ġrel ated +室 åĨħ +åº ľ +享 åıĹ +our s +è· ij +åķĨ ä¸ļ +ach ing +Ġsu n +Ġrecogn ition +el t +Ġor der +å¹³ åĿĩ +g ing +ä¸ ´ +çĤ ¼ +Ġgo ing +åij¼ åIJ¸ +Ġsoft ware +Ġre mot +èijĹ åIJį +幸 ç¦ı +Ġenh ance +èĻ ļ +Ġn ow +Ġth reat +Ġd est +åĿĩ åĮĢ +Ġac ad +åºĶ 对 +çľĭ åΰ +c ast +è¾ Ĩ +ific ial +Ġ very +o ok +åĮº åŁŁ +¹ ģ +æĪ¿ éĹ´ +æıIJä¾Ľ äºĨ +Ġmot iv +Ġaccess ible +åĨ³ å®ļ +Ġh y +å® Ī +Ġf lo +u g +Ġinform ed +åĵģ è´¨ +çļĦç Ł +av es +ar r +ĠW ith +le t +è§Ĥ çĤ¹ +en ge +è¡Į åĬ¨ +f riend +ç³ ķ +Ġf urther +ĠE ns +ç§ ģ +Ġad o +Ġcle 
an +缸 åºĶ +Ġf re +pecial ly +è Ĺ +Ġc apt +çļĦç ľ +Ġsome one +Ġc ell +æĶ¾ åľ¨ +欢 è¿İ +Ġ âĢ +Ġdev ices +çļĦ æĸ¹å¼ı +Ġjob s +au gh +n ot +æľī äºĽ +åħ¬ åħ± +g est +çļĦ çĶŁæ´» +çľ ¼ +çļĦ ä¿¡æģ¯ +ĠC ons +æİĴ åºı +Ġbenef it +re ct +å¤ ı +un te +符 åIJĪ +ä¸Ģ ä½į +åĨħ éĥ¨ +Ġlook ing +d ing +æĬ ĺ +è¾ ij +è¿Ļ个 éĹ®é¢ĺ +Ġes pecially +çľ ł +âĢĿ ãĢĤ +å¥ ı +ra y +è¿ĺ åı¯ä»¥ +åĪĽ ä½ľ +com ing +Ġmulti ple +éļ IJ +æ³ ¡ +æłĩ åĩĨ +Ġm il +éľĢè¦ģ 注æĦı +Ġanx iety +æĶ¹ è¿Ľ +å± ĭ +污 æŁĵ +ç¼ĸ ç¨ĭ +è´¹ ç͍ +Ġev alu +imate ly +Ġlit er +ogra ph +Ġse arch +1 6 +en ced +Ġmeth ods +çĥ Ī +模 å¼ı +çĬ¶ åĨµ +æĶ¹ åĸĦ +å¤ļ æł· +c er +å¥ ĸ +Ġsat is +Ġwebs ite +åĬ ŀ +åģ¥ èº« +Ġglo bal +Ġas k +Ġplatform s +Ġdise ases +çݰ 象 +t ics +æ± ģ +åΤ æĸŃ +Ġcon vers +Ġrelations hip +设 ç½® +æ³ķ å¾ĭ +Ġmind ful +é¢Ħ æµĭ +o very +åģ ľ +ç͵ è§Ĩ +è§Ħ åĪĻ +ak en +Ġimplement ing +is ing +åıĤ åĬł +æĥħ 绪 +Ġprovid ed +æ·± åħ¥ +Ġprogramm ed +Ġrelev ant +çļĦç ĥ +çĸ ¾çĹħ +åĮ» çĶŁ +åĪĽ 建 +Ġgener ate +æĶ¶ åħ¥ +ä¼ ij +iz es +Ġtrans form +éģ µ +ast ic +åij Ī +æ¯ı 个人 +è¿ Ķ +i et +Ġv oice +éĢ Ķ +æĶ¾ æĿ¾ +åį ´ +èĥ ľ +Ġst ructure +æĹ¶ å°ļ +Ġ Q +Ġel se +du c +Ġem p +èģ ļ +è´ § +ac hes +ç§ Ģ +an ks +Ġn ight +Ġprofessional s +Ġb as +è´ µ +e c +Ġdivers ity +it es +d r +åĽ° éļ¾ +ĥ åľ +åŀ ĥåľ +åŀĥåľ ¾ +Ġd rug +ç¢ ³ +Ġn ame +åĮĸ çļĦ +a id +æľĢ 大 +æij Ħ +ç®Ģåįķ çļĦ +Ġw arm +Ġd one +Ġfun ction +as c +强 è°ĥ +Ġdem and +Ġvis ual +Ġup d +æŃ£ åľ¨ +Ġsim ilar +éĢ Ĵ +æ¯ Ľ +éĶ » +ent ly +Ġvalu able +Ġdis aster +ä¸Ģ èά +æ´ ² +ĠR eg +Ġdiscrim ination +åĨĻ ä¸Ģç¯ĩ +Ġgovern ment +Ġ 好çļĦ +5 00 +ly ing +Ġpre v +Ġpre pare +Ġproble ms +è· ³ +Ġpro m +åĨ ² +å®ī è£ħ +éĶ» çĤ¼ +æµ ĵ +è ¹ +åºĶç͍ ç¨ĭåºı +n g +Ġcomp et +åĪĨ åĪ« +olo gical +å® ¡ +Ġtrans l +Ġdire ct +åī Ĥ +Ġsuggest ions +Ġpap er +Ġrecogn ize +t on +Ġmit igate +讨 论 +äºĴ åĬ¨ +ĠE ar +Ġam azing +c re +é¦ Ī +Ġinvol ved +f ace +æľī åħ³ +) ) +Ġex ce +Ġproduct ivity +è Ń +é¦ Ĩ +Ġsound s +Ġidentify ing +] , +é¾ Ļ +Ġf it +Ġcontribut e +th s +friend ly +e le +if ied +iven ess +ite ly +Ġ X +Ġl ed +åĿ ı +Ġhist or +Ġd at 
+Ġjour ney +Ġ } +Ġse lect +æ¼ « +Ġcon duct +è¿Ľ ä¸ĢæŃ¥ +ç»Ļ æĪij +Ġl if +è£ħ ä¿® +为 ä»Ģä¹Ī +äº ¬ +Ġn av +Ġwho le +ç ¹ģ +åĨ ľ +æĶ » +Ġb reat +Ġm iss +é¾ Ħ +t t +s w +Ġb ar +请 éĹ® +èģĶ ç½ij +Ġatt ract +æĤ¨ åı¯ä»¥ +O ne +åħħ åĪĨ +r ing +Ġå½ĵ çĦ¶ +re am +Ġev ol +Ġs n +ĠE m +m osp +Ġcho ose +v iew +Ġar r +Ġs leep +end ed +æŀ ¶ +Ġveh icles +Ġf resh +Ġorganiz ation +è¿Ļ 段 +æ± ¤ +ĠI nt +Ġcont ext +åı¦ å¤ĸ +Ġoce an +æĦŁ åıĹ +Ġpollut ion +ur b +æī§ è¡Į +erson al +ĠHe alth +ä¼ĺ çĤ¹ +Ġatt ention +æľī çĿĢ +é£Ł æĿIJ +Ġer r +çļĦæĿ ¥ +çļĦç Ī +èŃ ¦ +è· Ł +æĹħ è¡Į +èĴ ľ +çļĦæĢ Ŀ +Ġchat bot +çļĦ éľĢæ±Ĥ +çķ ¥ +Ġfeel ing +Ġimplement ed +社 åĮº +çļĦ 建议 +æIJ ħ +éĹ » +åıį é¦Ī +缴 æİ¥ +æĺ ¥ +it able +æĪij ä¼ļ +åį ± +èī¯ å¥½ +Ġl iving +åıĺ éĩı +ĠB ut +Ġcomple te +Ġtre nds +Ġm akes +ä»Ĭ 天 +Ġdist ribut +Ġcomm it +Ġat mosp +ä¼ ´ +Ġsens ors +Ġs w +æĹł 论 +om en +æĶ¿ åºľ +Ġchall enge +Ġt urn +çIJĨ 论 +p ar +Ġwrit e +ç»ı åħ¸ +em ember +é¥ Ń +æĸ¹ 便 +Ġc u +Ġval ue +Ġf und +p ose +è°ĥ æŁ¥ +çĿ ¡ +Ġcommunic ate +Ġdise ase +Ġrese arc +Ġl ack +arn ing +ĠP ark +çĦ ¦ +é«ĺ 度 +Ġr ather +å® £ +çĪ ¶ +éĺ ¶ +è® ¢ +çĥ § +Ġhig her +Ġsumm ary +ĠA ut +çļĦæ ³ +Ġe le +is ms +Ġrel i +ä¹Ł ä¼ļ +f ra +åijĬè¯ī æĪij +æĬ ½ +Ġsitu ations +Ġmar ine +æĥ³ è¦ģ +in ci +in al +Ġg ain +Ġdiffere nce +æľºåύ 人 +æµģ ç¨ĭ +ĠC hat +ç½ij ç«Ļ +æľ « +Ġcol or +Ġas pect +ç½ Ĺ +ĠE duc +Ġde ploy +Ġbeaut y +æĤ £ +ruct ion +it ut +æĿ Ł +让 æĪij们 +éķ¿ åº¦ +ul es +æ¶ī åıĬ +Ġdig ital +Ġexist ing +ĠO r +\_ \_ +Ġback ground +çĹ ĩ +æ¯ı 天 +p ython +Ġfarm ers +Ġcontin u +" : +Ġg iven +å°ı æĹ¶ +Ġmom ent +2 00 +J ohn +éĿ¢ 对 +Ġint ro +Ġtherap y +è¿Ķ åĽŀ +å¹¶ åľ¨ +Ġ z +Ġaff ord +ä¸ Ŀ +å® ½ +Ġ à +ĠN ational +èĥ ¡ +Ġexercis e +æIJħ æĭĮ +æĶ¯ ä»ĺ +éĺ³ åħī +è¯ ļ +Ġs ect +ĠS u +å¢ŀ éķ¿ +ç¾İ 丽 +Ġw a +以ä¸ĭæĺ¯ ä¸ĢäºĽ +èĽĭ ç³ķ +Ġ ill +æ¸ħ æĻ +et ry +æ¢ ¦ +ç¾İ åĽ½ +ä» į +one y +Ġecosystem s +æĮĩ 导 +d ef +9 9 +æŁ Ķ +pp ed +Ġlim it +çİ ī +Ġacad emic +Ġrestaur ants +Ġhe ad +ä¿¡ ä»» +ast ers +å² ģ +ak ers +1 4 +A s +æł ¡ +é«ĺ æķĪ +ph as +y n +ç¨ĭ 度 +è¾ £ +ä¸Ĭ éĿ¢ +å®¶ å±ħ +ter m 
+ç¾İ é£Ł +Ġo vers +å® ĺ +Ġind ic +ĠY our +S t +å½¢ 象 +è´ ¡ +åº Ĭ +ĠS c +ag ra +羣 æŃ£ +o int +id s +are nt +éĵ ¶ +èģ Ĭ +Ġreg ular +ä¼ĺ ç§Ģ +Ġcol le +çĸ ij +Ġsub ject +Ġgreat er +Ġst ore +åŁ¹ è®Ń +Ġim ag +Ġan sw +ä½ Ļ +Ġsp ot +åĪĨ åŃIJ +Ġaud ience +p et +Ġv ers +Ġtra il +åĭ ĩ +er ous +Ġguid ance +Ġspe ech +åĵ ² +æĺ¯ çͱ +è´¡ çĮ® +åIJĪéĢĤ çļĦ +设 æĸ½ +ä»ĸ 人 +ens ive +åĢ ¾ +al ing +Ġproject s +å ³ +Ġt akes +ç» © +T hat +Ġb ro +iv ed +Ġ & +åĿ IJ +place ment +è¿ŀ æİ¥ +çļĦç¤ ¾ +ĠT ra +Ġrel ax +u fact +éģ į +Ġsur v +åı£ åij³ +Ġcreat ivity +o f +å¨ ģ +çļĦç ł +Ġbreat h +Ġpl aces +Ġdesc rib +èĭ± è¯Ń +Ġdam age +or ation +为 æĤ¨ +if t +Ġc ase +å¹´ é¾Ħ +Ġp ress +çĶ ľ +éĩ İ +æĹħ 游 +Ġt aken +in ed +Ġcon cept +æĴ Ń +Ġinterest ing +è· µ +Ġse a +6 0 +Ġf oot +ĠN ame +Ġresearc hers +éĢ ģ +Ġwe e +) ; +çļĦ åħ³éĶ® +ä¼ ½ +ele br +å¡ ij +W e +ç»ı 常 +Ġpopul ations +åħ¬ å¼ı +or n +çĩ ĥ +人 çĶŁ +1 7 +æİ¥ åıĹ +Ġloc ation +Ġin equ +Ġinter vent +Ġinterest ed +Ġdefin itely +Ġassist ance +è¿Ļ ä¸Ģ +åIJĪ åIJĮ +ä¼ĺ åĬ¿ +çļĦ å·¥ä½ľ +Ġ1 2 +Ġmo v +åģ ı +åŃĺ åĤ¨ +us ive +æĹ ı +ï¼ī ï¼Į +Ġg as +Ġinterest s +æ¸ħæĻ ° +Ġg ard +çĸ « +Ġs ay +å¤ « +g es +èIJ ¨ +ä¸ļ åĬ¡ +个 æĢ§ +åIJ ¯ +Ġeng agement +Ġb ig +éľĢè¦ģ èĢĥèĻij +Ġpr inci +åij¨ åĽ´ +Ġopportun ity +çģ ¾ +èĹ ı +re l +缺 çĤ¹ +Ġhapp y +åĴĮ åħ¶ä»ĸ +av a +Ġestab lish +鸡 èĽĭ +i king +ĠT rans +rast ructure +fore st +èİ· åıĸ +èĦ ļ +in ally +èµ ı +Ġdel icious +Ġresult s +è§Ĥ å¯Ł +å®ŀ è·µ +Ġl ast +Ġpol it +æĢ§ èĥ½ +F or +b i +缸 ä¿¡ +ff ee +Ġph r +Ġfore st +ell ing +æµģ è¡Į +at ic +大 å®¶ +ĠIn st +æķ° åѦ +æī © +å®Į åħ¨ +å¼ķ èµ· +es e +转 æį¢ +Ġaffect ed +Ġrobot ics +综 ä¸Ĭ +Ġpro p +让 人 +æ² ³ +ä¸Ń æľĢ +Ġaut onomous +Ġha ving +Ġtri p +ur y +Ġbi ased +Ġconsider ations +Ġpartic ular +åį ł +æİ¨ 广 +Ġiniti atives +ial s +åij³ éģĵ +Ġtreat ments +Ġem phas +çĭ¬çī¹ çļĦ +Ġl ay +æĶ¿ çŃĸ +æĢİ ä¹Ī +ron ic +pl ay +Ġco ok +è¿Ľ åħ¥ +è½ ® +Ġvol unte +Ġra in +ĠM on +Ġconsum ption +èĽĭ çϽ +ĠS oc +å£ ¤ +Ġrout ine +Ġimpro ved +T o +人 çī© +读 èĢħ +Ġgo al +广 åijĬ +éķ¿ æľŁ +Ġe y +H e +Ġout do +Ġcu is 
+Ġa way +Ġbo oks +Ġtop ic +大 åĪ© +h ouse +Ġon es +ç§ Ł +' : +æĪ¿ å±ĭ +ç§» åĬ¨ +Ġdis asters +est s +ill ing +绿 èī² +åĵ² åѦ +æĪIJ åĪĨ +Ġocc ur +ľ ä¼½ +åľŁ 壤 +çļĦ 主è¦ģ +çݰ å®ŀ +Ġanim al +é¢Ĩ 导 +Ġview s +éĤ ® +æ°§ åĮĸ +ath y +éģĵ å¾· +社交 åªĴä½ĵ +ĠP ersonal +Ľ åĽ´ +Ġpur ch +Ġcount ry +Ġrem ind +å¯ ¸ +Ġr ights +çļĦ çݯå¢ĥ +ĠP r +Ġl ine +ib r +é© ¾ +Ġm aj +Ġover come +Ġne xt +æīĢ è¿° +è§Ħ å®ļ +Ġinteract ions +Ġconf lic +Ġwh y +ç³» åĪĹ +å° ¼ +ib ly +çīĽ å¥¶ +Ġrespons es +s es +åѦ ä¼ļ +b ol +Ġstand ards +ul ner +对è¯Ŀ åĨħ容 +l ished +çļĦæĢ § +çĶŁæĢģ ç³»ç»Ł +an n +æĥħåĨµ ä¸ĭ +寻 æ±Ĥ +Ġh old +d en +åį ĥ +Ġment ion +ĠMan y +缴 åΰ +éģ Ĺ +he l +Ġbelie ve +ar ies +æľī ä¸Ģ个 +1 3 +Ġatmosp here +Ġm or +æĹ¥ æľŁ +ä¹ ħ +ä½ł 好 +Ġaddress ing +ĠâĢ ĵ +çļĦåľ° æĸ¹ +m ing +Ġcan not +Ġman ufact +Ġp ie +ic ing +Ġstud ies +ç¾İ åij³ +ĠAmeric an +ĠN LP +Ġacc ording +ms elves +èĦ Ĥ +èĩª ä¿¡ +æīĢ éľĢ +Ġthe mselves +Ġremot e +åŁ¹ åħ» +å®ī æİĴ +ä½ł éľĢè¦ģ +Ġreg ard +ir ing +è¯Ĩ åĪ« +Ġart icle +æģ Ĵ +æĢ» çļĦæĿ¥ +Ġal ign +æ± ł +ten ance +fact ion +åĬ¨ ä½ľ +çļĦç © +ç¼ © +æĢ ¥ +Ġ1 00 +Ġtest ing +åŃĹ æ¯į +å¹´ è½» +åζ éĢł +Ġs we +å° º +he ns +æ°´ æŀľ +Ġinf rastructure +èī² å½© +æĢ»çļĦæĿ¥ 说 +æľī ä»Ģä¹Ī +te xt +车 è¾Ĩ +Ġp ay +ro p +Ċ ĠĠ +Ġcaus ed +Ġcor rect +Ġ ì +èĥ ŀ +ĠM ed +ç²¾ ç¥ŀ +æ°ĶåĢĻ åıĺåĮĸ +ĠR ed +äºĴ èģĶç½ij +Ġeng age +åĪĨ 为 +ĠD ata +Ġful l +en c +éĩį æĸ° +æŃ£ç¡® çļĦ +çļĦæ° Ķ +åıĮ æĸ¹ +Ġcom es +åı¤ 代 +æŁIJ äºĽ +åijĪ çݰ +Ġto day +ag ed +æĪij åı¯ä»¥ +æĹ¥ 常 +æ» ij +Ġcl in +Ġ \ +Ġo bs +Ġart ificial +Ġexce ll +çļĦç ¬ +all s +Ġprodu ce +ĠD es +os s +è¹ Ī +Ġdra w +Ġlet ter +Ġadv ice +Ġhigh ly +çĬ ¯ +综ä¸Ĭ æīĢè¿° +满 æĦı +Ġprinci ples +èĮ Ħ +Ġfeel ings +çļĦæ ´ +Ġh om +Ġf ail +Ġcro p +å§ ľ +Ġquest ion +Ġdis abilities +èĪŀ è¹Ī +Ġimp lications +r al +Ġs ing +4 0 +Ġfam il +Ġgovern ments +Ġrec ord +å½¢ çĬ¶ +Ġbe gin +is es +çļĦæĥ ³ +ach ine +è° ± +Ġv ulner +Ġpro per +Ġovers ight +è´Ł éĿ¢ +Ġem ail +Ġnew s +Ġexpl oring +Ġf avor +æ¥ ¼ +å® ľ +Ġun ivers +å·® å¼Ĥ +ï¼ī ãĢĤ +è§£åĨ³ éĹ®é¢ĺ +Ġfam ous +g n +Ġmess age +at itude +Ġc 
ra +Ġco ver +æ·± åĪ» +åı¯ä»¥ éĢīæĭ© +çĶŁæ´» ä¸Ń +ç§į ç±» +Ġsm art +on str +ve y +çĶ ² +Ġreg ularly +ĠS m +æĦŁ è§ī +Ġthough t +Ġex h +c ure +ç» ĺ +认 è¯Ĩ +Ġo ld +æĦ ī +ç§° 为 +Ġfiel ds +Ġcons ist +ã ģ +ç»Ĩ èĥŀ +Ġh ours +8 0 +al king +è§ī å¾Ĺ +ç» Ŀ +ä½ł 们 +ĠEng lish +Ġsignificant ly +Ġs ource +Ġan t +Ġeducation al +Ġtas k +Ġhand le +æIJ ľ +ĠS p +Ġcall ed +Ġter ms +æ² ī +Ġw in +duct ion +Ġmod ern +Ġcuis ine +å¥ Ĺ +è§ ¦ +olut ely +ç« ¥ +p ite +Ġf elt +Ġcomp re +Ġw ond +è¿IJ è¡Į +Ġres il +缸 ä¼¼ +éĩij èŀį +çα æĥħ +ç¬ Ķ +èĪ ª +è° Ī +åĬĽ çļĦ +æľī æīĢ +æ½ ľ +ul ate +Ġdetect ion +宣 ä¼ł +Ġmat ter +éĩı åŃIJ +W rite +ç»ĵ åIJĪ +ç»ı è¿ĩ +Ġdevelop ers +è ª +Ġ --- +人 éĻħ +çŃ ¾ +ï¼ļ âĢľ +Ġinnov ative +ãĢĤ âĢĿ +å½ ¼ +é¥ ¼ +è¿ĩ 度 +Ġplan et +åħ ° +å¸ ģ +æķ ¬ +Ġleg al +Ġlo t +æĪIJ为 äºĨ +i ate +Ġm is +åģĩ 设 +çļĦ æĸĩ竳 +ĠCom pan +Ġd oc +Ġcare ful +Ġe ver +æĪij们 å°Ĩ +ä¾ĭ åŃIJ +ä¹ ³ +ä½ľ èĢħ +åIJ § +æļ ´ +Ġrem ember +缮 çļĦ +Ġp ut +常è§ģ çļĦ +Ġf est +建 设 +å®ŀ ç͍ +Ġact ive +çª Ĺ +ou th +åİŁ çIJĨ +Ġtry ing +è¿ · +缸 åIJĮ +éħĴ åºĹ +An other +æľĢ ä½³ +Ġanaly tics +Ġper pet +ip ment +Ġ å¦Ĥæŀľ +è§Ĥ ä¼Ĺ +Ġc elebr +Ġhe av +Ġmed itation +大 æ°Ķ +A nd +ä¸į éĶĻ +Ġwhe ther +s et +Ġdem onstr +ä¸Ģ 款 +æĶ¶ éĽĨ +éĻIJ åζ +Ġ ing +Ġrev olution +çľ ģ +Ġsc ience +缮 åīį +Ġthink ing +± ä¹IJ +课 ç¨ĭ +Ġp ack +Ġim age +lo c +Ġst ories +uc k +Ġsatis faction +Ġcollect ion +h o +èµ ŀ +éĿ¢ 临 +Ġl a +Ġsym bol +Ġem b +Ġhabit ats +Ġlow er +Ġcontin ues +éľ ĩ +åĵ Ī +ĠT ake +Ġenviron ments +Ġth ree +Ġen c +ĠA cc +æĦı åij³ +åİ ¨ +ch an +ĠH um +Ġtr ue +åĪĩ æĪIJ +s ing +âĢĶ âĢĶ +åĩº æĿ¥ +Ġreg ion +Ġinter pre +Ġdiagnos is +é ŀ +Ġdo ing +Ġr un +Ġco ffee +Ġmaj or +Ġmindful ness +Ġafford able +çĻ ¾ +Ġdetail ed +éĿŀ常 éĩįè¦ģçļĦ +çļĦæ² ŁéĢļ +çļĦæķ ħ +åĢĴ åħ¥ +Ġthem es +Ġnet work +ï¼ī ï¼ļ +ĠUn ited +çļĦæĮ ĩ +ort s +åį« çĶŁ +Ġplan ning +æĥ ł +åī ª +ĠPro v +çļĦ åºĶç͍ +Ġp eri +Ġaccount able +çī Ļ +çļĦç ģ +Ġcho ice +ĠC omm +id ents +çļĦ å®īåħ¨ +å¹¶ ä¸į +太éĺ³ ç³» +Ġrece ive +Ġclo se +çļĦæĹ¶ åĢĻ +Ġchang ing +ä»·å̼ è§Ĥ +Ġperpet u +Ġse ason +Ġm en 
+Ġlearn ed +Ġsitu ation +Ġre place +he ad +让 æĪij +åľ¨ ä¸Ģèµ· +çļĦç© º +éľ ² +Ġen ough +å±ķ çݰ +Ġlead ers +an cing +Ġtemper ature +åı « +Ġ3 0 +æĦıåij³ çĿĢ +æ± ĩ +ĠGo vern +Ġfocus ed +u ro +Ġsim ple +Ġh iking +æ¯ Ĵ +Ġcompre hens +äº Ī +Ġcreat ed +con d +é¡ µ +ĠW or +è¯ģ æį® +Ġwork place +Ġcharact ers +çļĦ 设计 +Ġme chan +ĠD is +ç¥ŀ ç§ĺ +å· ŀ +ĠO n +< / +ç§į æ¤į +Ġpat h +Ġlim ited +Ġsol ar +çļĦæ ı +2 2 +Ġappreci ate +å¿« ä¹IJ +æĦŁ åıĹåΰ +èĢ Ĺ +m ed +ic ine +Ġnot e +å½ĵ åīį +æĪij们 åºĶ该 +Ġse en +ä¸Ģ åIJį +å°½ åı¯èĥ½ +è¿IJ ç®Ĺ +è§Ĵ 度 +Ġequ ipment +Ġsp read +è ¸ +è® ¿ +åı¥ è¯Ŀ +æĮ ¥ +Ġpur pose +请 ä½ł +Y our +ari an +ä» ª +Ġperspect ives +åĩº äºĨ +å©ļ 礼 +Ġexcell ent +ĠEns uring +Ġre ach +éĺ¶ æ®µ +ä¿Ŀ éļľ +Ġemp athy +ĠM y +çij ľä¼½ +Ġ ver +ab el +ĠPre dict +Ġmain tenance +è¯Ħ ä»· +Ġ ult +åĴ ¨ +o x +åĴ¨ 询 +Ġshare d +in a +l ist +Ġoutdo or +Ġthough ts +in ating +éĴ ± +Ġfra me +éĺ ¿ +åĪ© 润 +çļĦæİ ¨ +åį ļ +Ġrec ent +Ġal tern +are d += = +Ġro ad +äºĭ 项 +g ed +y nt +Ġspe nd +ç½ ª +åıĸ å¾Ĺ +é ¹ +l i +æĹ¶ æľŁ +严 éĩį +å¿ Ĩ +å© ´ +æİ¥ ä¸ĭæĿ¥ +ĠEar th +ĠChat bots +Ġset ting +ç¥ Ŀ +éĶĢåĶ® é¢Ŀ +ä¼ ¦ +Ġread ing +æİ¢ 讨 +a ign +éŀ ĭ +Ġyou ng +Ġcare er +Ġteac hers +çļĦ è´¨éĩı +å±ŀ äºİ +Ġeas ier +Ġscient ific +ç¾İ åħĥ +Ġsp ir +åĬ ³ +çļĦæĶ ¯ +r ist +èµĦ 产 +çĶŁ åŃĺ +èĩ³ å°ij +å§ ¿ +Ġvide o +Ġa im +å®Ŀ å®Ŀ +çζ æ¯į +________ ________ +al ities +Ġb ud +Ġstre et +Ġ æĺ¯ +æĸ¹ ç¨ĭ +ä¸ĸ 纪 +c hes +ear ch +æĴ ° +Ġeng ine +Ġdis placement +ĠRo bots +erv ised +é¡ ¶ +ou d +Ġw alk +Ġemerg ency +èģ ĺ +n al +Ġdat as +åĢ º +åIJİ çļĦ +å¾Ī 好 +Ġmy self +çļĦæī ĭ +Ġus age +Ġsh own +æ® Ĭ +Ġtyp ically +u ly +æĸ° éĹ» +æĽ ¿ +Ġor ig +è½» æĿ¾ +æĺ¾ 示 +Ġado pt +èĤ¡ 票 +Ġp arent +a ps +æĢĿ æĥ³ +Ġmarket ing +èĻ « +éĥ¨ éŨ +çļĦæķ Ī +Ġcomfort able +åŃ¦ä¹ł åĴĮ +Ġfore cast +ict ion +Ġget ting +Ġtre es +av ing +çļĦ åŁºç¡Ģ +read y +æĸ° é²ľ +go ing +¹ é¥ +Ġev idence +¹é¥ ª +ç§ ĭ +æľī å¾Īå¤ļ +éĿ¢ è¯ķ +éģĩ åΰ +ç»Ļ å®ļ +ir c +åı¯ä»¥ æł¹æį® +驾 é©¶ +å·§ åħĭ +Ġst unning +çļĦæ ¦Ĥ +æ¡ Į +ĠJ ohn +ul ation +åıĤ èĢĥ +Ġf lex +çĦ¦ èĻij +ym akers 
+Ġfor ms +s h +v al +ĠS o +c o +æİ¨ åĬ¨ +èħ ¿ +çī¹ æ®Ĭ +Ġen ab +å°Ĩ ä¼ļ +æĶ¯ åĩº +åĿļ æĮģ +红 èī² +Ġopt ion +Ġstart ed +r ation +Ġpo etry +Ġp ort +g en +èª ī +Ġdel iv +çĶ ļ +éĢ » +éĢī 项 +Ġg round +å½¼ æŃ¤ +an a +çļĦæĹ ¥ +åľ¨ 线 +Ġse cure +Ġ æł¹æį® +饮 æĸĻ +Ġgr atitude +第 ä¸ī +Ġs ong +Ġpoint s +Ġal ready +çļĦçĪ ± +ĠTe chn +Ġreal ity +çı Ń +Ġs ince +Ġpopul ation +y ond +b or +ĠSoc ial +æıIJ åıĸ +å·¥ ç¨ĭ +a ff +交 æĺĵ +Ġwor th +å¡ « +å¨ ±ä¹IJ +Ġdo g +ĠAr t +ç¡ ¬ +æµ· æ´ĭ +åĨ Ĵ +çī Ī +Ġprogramm ing +ĠAs s +ĠM achine +å̼ å¾Ĺ +请 è¾ĵåħ¥ +声 éŁ³ +Ġexercis es +åħī 线 +æ³ķ åĴĮ +Ġfeat ure +e ff +è¿Ľ æŃ¥ +女 æĢ§ +Ġefficient ly +çļĦæĬĢ æľ¯ +Ġgen etic +令 人 +è´ ¦ +çļĦ 产åĵģ +åİ ļ +åĴĮ æĸĩåĮĸ +éĻ Ħ +Ġmo b +综 åIJĪ +t ers +æľī ä¸Ģ +å¦ Ĩ +åį Ī +Ġout side +Ġprop ert +éĤ® ä»¶ +主 ä¹ī +Ġpolic y +èĩª 身 +Ġnav igate +Ġst y +ç͵ èĦij +Ġab ilities +Ġfac ed +çļĦç ¼ +çļĦ å°ı +è ķ +Ġt one +ig ation +åıĤ æķ° +èĽĭçϽ è´¨ +ä½ Ľ +çĶļ èĩ³ +Ġsk in +èĴ ¸ +æĭ Ľ +éŃ Ķ +ash ion +Ġing red +æĹ ĭ +Ġcamp aign +Ġm ount +Ġcons id +Ġmus e +n ter +w ater +ä¼ļ è®® +Ġprotect ion +ä¿Ŀ éĻ© +Ġcro ps +og le +éļı æĹ¶ +æļ Ĺ +i um +ä¹ ı +Ġdi et +l ies +ç͍ æĿ¥ +ĠEn coura +æĬ Ĺ +ap an +éĺ² æŃ¢ +W ow +çļĦ åŁºæľ¬ +å¹³ æĸ¹ +Ġst ep +åı¯ éĿł +表 æĺİ +Ġpredict ions +Ġsym pt +Ġdiagnos es +åħ¬ åĽŃ +Ġsupp ly +Ġprev ious +ç»Ħ åIJĪ +. 
, +çļĦ è¿ĩç¨ĭ +æķ ı +s u +ar is +çķ ħ +oc ol +æIJľ ç´¢ +it le +éĨ Ĵ +顾 客 +éĢ» è¾ij +éĿŀ常 éĩįè¦ģ +ĠB i +å·¦ åı³ +am m +Ġevery thing +æĺ ł +Ġincre d +Ġpe ace +èľ ľ +Ġmuse um +çĭ¬ ç«ĭ +Ġcomprehens ive +Ġr ates +/ / +Ġra d +åĦ¿ ç«¥ +çī¹ èī² +ĠPredict ive +å¼ķ åĬĽ +l er +å° ¤ +ic ro +è¡ ¥ +Ġdeterm ine +çļĦ åĨħ容 +Ġcom pl +Ġgreen house +èħ IJ +Ġhigh light +Ġpart ners +Ġdo ct +çļĦ 使ç͍ +æŃĮ æĽ² +æĮĩ åįĹ +ĠA f +æľº æŀĦ +éĢ Ģ +Ġpoem s +å¿ĥ åĴĮ +Ġatt end +çļĦæ¸ ¸ +Ġs ide +al es +Ġmention ed +ĠA bs +Ġhistor ical +Ġle ft +以ä¸ĭ åĩłä¸ª +åıĹ æ¬¢è¿İ +èıľ åĵģ +Ġrem ain +æ ĩ +Ġtour s +ł éģĵ +Ġerr ors +æľº åζ +æ ¦ +æĤ£ èĢħ +m ore +Ġexpert s +çļĦçł Ķç©¶ +ç»ĵ æĿŁ +Ġwrit ten +çł Ķ +Ġe t +in put +æ°Ķ ä½ĵ +è ļ +æĥ Ĭ +Ġa ge +éĩį å¤į +å¼ ¹ +åŃ ¤ +Ġsympt oms +Ġbelie f +' d +i ol +Ġ1 8 +åħħ è¶³ +çı į +force ment +æĸ Ĺ +ª èĮĦ +Ġ1 5 +ä¸Ģ个 人 +Ġapp lic +è´ ¥ +ä½į äºİ +éϤ äºĨ += " +ä¸ī è§Ĵ +æĢĿ ç»´ +åį · +Ġf ru +ĠCol labor +Ġpr im +Ġrequire d +Ġw atch +è°ĥ åij³ +ç»ĵ 论 +on y +Ġgu ide +Ġm ax +ĠC ould +Ġadv ent +ĠO verall +çļĦæĬ ķ +Ġexp er +å ĺ +ic ial +ost er +çļĦ é¢ľèī² +Ġoper ations +éĥ ģ +Ġm oney +le y +c ling +Ġo il +çļ® èĤ¤ +Ġg e +Ġb at +ĠP h +Ġsc he +Ġelect ric +v est +Ġch ain +Ġcap abilities +ir d +è¯ģ æĺİ +æľĢ 好 +iv il +Ġdepend ing +Ġs ave +Ġpract ical +Ġcult ures +缸åºĶ çļĦ +s y +çļĦç ² +Ġbeh ind +æĹ¶éĹ´ åĴĮ +å¹ ħ +ĠA g +Ġeffect iveness +A d +ĠO f +Ġany thing +å·§åħĭ åĬĽ +Ġm ist +Ġlangu ages +ĠM ake +å « +æ£ ® +ĠCon t +ĠAbs olutely +Ġinvest ment +m at +çļĦæķħ äºĭ +æ¬ § +Ġspe ed +çļĦæ¸ © +Ġc ities +åĨĻ ä½ľ +Th anks +Ġd ed +åĪĨ éħį +Ġd ark +Ġsupport ing +å¹ ķ +ĠK e +éĽ ¶ +Ġsh aring +Ġh ouse +认 çŁ¥ +Ġsurround ing +Ġredu ced +Ġf u +Ġst or +Ġab s +T om +c ent +ĠEduc ation +Ġth r +ot t +ĠTh at +Ġhe ar +un g +Ġbe yond +ĠC o +ro om +è¯Ĺ æŃĮ +re me +Ġlit tle +Ġg ames +ä¹ĭ åIJİ +éĥ½ ä¼ļ +è¯Ń éŁ³ +ç¬ ij +çī¹ å®ļ +第 ä¸Ģ +Ġdep ression +Ġinnov ation +ĠF r +Ġcomput er +c an +å³ ° +ç¼ĸåĨĻ ä¸Ģ个 +Ġintern ational +Ġcan cer +åѦ èĢħ +Ġdisc over +he t +Ġcomp os +Ġrec y +Ġ2 00 +åIJ« æľī +çĹ Ľ +ç¼ĵ è§£ +Ġfre qu +çĶ ³ 
+ĠM ar +çļĦ éĢīæĭ© +Ġun t +Ġreg ions +Ġop in +ĠGovern ments +æ¶ Ĥ +åĨħ å¿ĥ +ä¸Ĭ æľĢ +ä»į çĦ¶ +l ier +æ³ ³ +äºĴ 缸 +ĠSt ud +az on +Ġar ch +Ġche m +çļĦ èĥ½åĬĽ +çļĦ ä¸Ģ个 +Ġa p +Ġre d +Ġw omen +Ġpro te +Ġfind ing +å§ » +éĢĤå½ĵ çļĦ +Ġfor ward +对 象 +Ġwa it +Ġconsid ered +du le +b acks +Ġclin ical +åħ· å¤ĩ +éº ¦ +Ġon going +åĨ Ľ +Ġf ar +åĴĮ è° +XX X +Ġpolit ical +Ġcam er +çļĦ è¡Į为 +æĦı 大åĪ© +Ġapp s +åĩı è½» +Ġread ers +å©ļ å§» +æ° ¸ +o res +åħ¨ éĿ¢ +ĠAf ric +Ġfavor ite +Ġm ill +Ġd ang +ĠSt ates +åĢ Ł +å¯ ¿ +Ġl at +è¿ĩ åİ» +Ġtr uly +åĽŀçŃĶ éĹ®é¢ĺ +Ġco gn +ä» ° +ĠJ apan +iz z +çļĦæĿ IJ +x x +é¢ĺ 缮 +ri ption +éĤ£ äºĽ +Ġbud get +Ġv ast +éļIJ ç§ģ +Ġpolic ymakers +è¿ĺ éľĢè¦ģ +å¹¶ æıIJä¾Ľ +Ġswe et +Ġgener al +æ» ¤ +Ġbir ds +Ġpl astic +Ċ ĉ +åĪ º +ment al +Ġincl usive +Ġtop ics +Ġs low +ä½ł èĥ½ +è¶³å¤Ł çļĦ +è§Ĩ è§ī +w w +Ġ 使ç͍ +æī ¹ +æ¦Ĥ 念 +é£Ł ç͍ +èĢ ³ +c ks +Ġfra ud +Ġingred ients +Ġf asc +åĮĹ äº¬ +Ġf r +Ġmanufact uring +Ġ ä½ľä¸º +Ġbe ach +é¡ ¿ +eri ous +å¤ĸ è§Ĥ +é¢Ħ éĺ² +æĿ¥ èĩª +èĤĮ èĤī +Ġd ays +Ġass ign +Ġadv ant +Ġteam s +é¢ Ĺ +now n +ĠP o +} { +Ġmin ut +it ions +Ġeas ily +ĠB l +n ame +åѦ æł¡ +Ġrespons ibility +åıij æĮ¥ +Ġsens itive +çŃī äºİ +ci ous +Ġs ou +å± ı +Ġr ich +å½ĵ çĦ¶ +m an +Ġinterpre t +2 4 +Ġshow s +èģĮ åľº +Ġf all +è½ ½ +丰å¯Į çļĦ +( ' +ä¿® æĶ¹ +æĽ´ æį¢ +A l +åı¯èĥ½ æĺ¯ +Ġr ate +Ġprotect ing +f it +Ġ5 0 +Ġmove ment +è§ Ī +Ġemploy ee +Ġdis ord +åĪĽ æĦı +产åĵģ çļĦ +æľ Ŀ +ĊĠĠĠĠĠĠĠĠ ĠĠĠĠĠĠĠ +Ġpre d +Ġoffer ing +åįģ åĪĨ +èĢĮ ä¸įæĺ¯ +Th ank +æĽ ¾ +Ġele ments +ç² Ĵ +Ġcour ses +Ġintegr ated +ĠC ar +agra ph +åŁº åĽł +Ġinst ead +èĦ ± +åı¦ ä¸Ģ个 +å¯Ĩ çłģ +Ġallow ed +éĿ¢ åĮħ +çķ ªèĮĦ +åĴĮ åıijå±ķ +å° ģ +Ġconnect ion +åľ¨ ä¸Ģ个 +Ġuse ful +è¯Ń åı¥ +åĪĨ å¸ĥ +表 æ¼Ķ +æľī æĹ¶ +çļĦæĹ ħ +çļĦæĢ » +Ġf ashion +èĭ ¦ +è¦ģ 注æĦı +çĶŁ ç´ł +Ġnut ri +èĩª è¡Į +çļĦç ĭ +çIJĨè§£ åĴĮ +Ġc at +æľºåύ åŃ¦ä¹ł +Ġexh ib +åĴĮ æľįåĬ¡ +fra c +e pend +Ġimpact ed +Ġ ut +æķ° ç»Ħ +ĠWor ld +Ġansw er +ers e +éª ¨ +Ġart ists +åŃ©åŃIJ çļĦ +ä» Ķ +çĻ » +ĠA re +Ġco ol +Ġcogn itive +åIJĦ 个 +l ike +å©´ åĦ¿ +åĪĹ åĩº +å¹ » 
+ron t +å®¶ éķ¿ +缺 ä¹ı +Ġcy ber +il t +Ġcapt ure +å Ĺ +åľ¨ äºİ +Ġthreat s +åĴĮ 社ä¼ļ +Ġcell s +æ¸ħ åįķ +ĠV is +æİ ī +Ġh ol +åŃIJ çļĦ +C h +è Ŀ +Ġs aid +Ġd ream +un ch +un e +ĠD on +å®¶ 人 +ç± į +æĦŁ åĴĮ +Ġexperi enced +çļĦéĩįè¦ģ æĢ§ +å¼ ĥ +um p +éĺ IJ +Ġhabit at +è¢ ĭ +Ġj o +ç®Ģ æ´ģ +Ġb ur +Ġvisit ors +éĽ ħ +çļĦçŁ ¥ +Ġent ire +讲 è¿° +äºĨ ä¸ĢäºĽ +åįı ä½ľ +ĠB us +å° ¾ +çļĦæķ Ļ +olo g +Ġsign s +Ġspeak er +çļĦ éŁ³ä¹IJ +Ġno vel +å±ħ æ°ij +çļĦ åıĺåĮĸ +å°½ éĩı +Ġspir it +å®Į ç¾İ +è´ · +å¿ħè¦ģ çļĦ +ie f +示 ä¾ĭ +Ġd iv +æķ´ æķ° +Ġeconom y +Ġethical ly +éĻ Ī +Ġschool s +Ġnet works diff --git a/model/minimind_tokenizer/tokenizer.json b/model/minimind_tokenizer/tokenizer.json new file mode 100644 index 0000000..dcfabc6 --- /dev/null +++ b/model/minimind_tokenizer/tokenizer.json @@ -0,0 +1,12603 @@ +{ + "version": "1.0", + "truncation": null, + "padding": null, + "added_tokens": [ + { + "id": 0, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 1, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + }, + { + "id": 2, + "content": "", + "single_word": false, + "lstrip": false, + "rstrip": false, + "normalized": false, + "special": true + } + ], + "normalizer": null, + "pre_tokenizer": { + "type": "ByteLevel", + "add_prefix_space": false, + "trim_offsets": true, + "use_regex": true + }, + "post_processor": null, + "decoder": { + "type": "ByteLevel", + "add_prefix_space": true, + "trim_offsets": true, + "use_regex": true + }, + "model": { + "type": "BPE", + "dropout": null, + "unk_token": null, + "continuing_subword_prefix": null, + "end_of_word_suffix": null, + "fuse_unk": false, + "byte_fallback": false, + "ignore_merges": false, + "vocab": { + "": 0, + "": 1, + "": 2, + "!": 3, + "\"": 4, + "#": 5, + "$": 6, + "%": 7, + "&": 8, + "'": 9, + "(": 10, + ")": 11, + "*": 12, + "+": 13, + ",": 14, + "-": 15, + ".": 16, + "/": 17, + 
"0": 18, + "1": 19, + "2": 20, + "3": 21, + "4": 22, + "5": 23, + "6": 24, + "7": 25, + "8": 26, + "9": 27, + ":": 28, + ";": 29, + "<": 30, + "=": 31, + ">": 32, + "?": 33, + "@": 34, + "A": 35, + "B": 36, + "C": 37, + "D": 38, + "E": 39, + "F": 40, + "G": 41, + "H": 42, + "I": 43, + "J": 44, + "K": 45, + "L": 46, + "M": 47, + "N": 48, + "O": 49, + "P": 50, + "Q": 51, + "R": 52, + "S": 53, + "T": 54, + "U": 55, + "V": 56, + "W": 57, + "X": 58, + "Y": 59, + "Z": 60, + "[": 61, + "\\": 62, + "]": 63, + "^": 64, + "_": 65, + "`": 66, + "a": 67, + "b": 68, + "c": 69, + "d": 70, + "e": 71, + "f": 72, + "g": 73, + "h": 74, + "i": 75, + "j": 76, + "k": 77, + "l": 78, + "m": 79, + "n": 80, + "o": 81, + "p": 82, + "q": 83, + "r": 84, + "s": 85, + "t": 86, + "u": 87, + "v": 88, + "w": 89, + "x": 90, + "y": 91, + "z": 92, + "{": 93, + "|": 94, + "}": 95, + "~": 96, + "¡": 97, + "¢": 98, + "£": 99, + "¤": 100, + "¥": 101, + "¦": 102, + "§": 103, + "¨": 104, + "©": 105, + "ª": 106, + "«": 107, + "¬": 108, + "®": 109, + "¯": 110, + "°": 111, + "±": 112, + "²": 113, + "³": 114, + "´": 115, + "µ": 116, + "¶": 117, + "·": 118, + "¸": 119, + "¹": 120, + "º": 121, + "»": 122, + "¼": 123, + "½": 124, + "¾": 125, + "¿": 126, + "À": 127, + "Á": 128, + "Â": 129, + "Ã": 130, + "Ä": 131, + "Å": 132, + "Æ": 133, + "Ç": 134, + "È": 135, + "É": 136, + "Ê": 137, + "Ë": 138, + "Ì": 139, + "Í": 140, + "Î": 141, + "Ï": 142, + "Ð": 143, + "Ñ": 144, + "Ò": 145, + "Ó": 146, + "Ô": 147, + "Õ": 148, + "Ö": 149, + "×": 150, + "Ø": 151, + "Ù": 152, + "Ú": 153, + "Û": 154, + "Ü": 155, + "Ý": 156, + "Þ": 157, + "ß": 158, + "à": 159, + "á": 160, + "â": 161, + "ã": 162, + "ä": 163, + "å": 164, + "æ": 165, + "ç": 166, + "è": 167, + "é": 168, + "ê": 169, + "ë": 170, + "ì": 171, + "í": 172, + "î": 173, + "ï": 174, + "ð": 175, + "ñ": 176, + "ò": 177, + "ó": 178, + "ô": 179, + "õ": 180, + "ö": 181, + "÷": 182, + "ø": 183, + "ù": 184, + "ú": 185, + "û": 186, + "ü": 187, + "ý": 188, + "þ": 189, + "ÿ": 190, + "Ā": 
191, + "ā": 192, + "Ă": 193, + "ă": 194, + "Ą": 195, + "ą": 196, + "Ć": 197, + "ć": 198, + "Ĉ": 199, + "ĉ": 200, + "Ċ": 201, + "ċ": 202, + "Č": 203, + "č": 204, + "Ď": 205, + "ď": 206, + "Đ": 207, + "đ": 208, + "Ē": 209, + "ē": 210, + "Ĕ": 211, + "ĕ": 212, + "Ė": 213, + "ė": 214, + "Ę": 215, + "ę": 216, + "Ě": 217, + "ě": 218, + "Ĝ": 219, + "ĝ": 220, + "Ğ": 221, + "ğ": 222, + "Ġ": 223, + "ġ": 224, + "Ģ": 225, + "ģ": 226, + "Ĥ": 227, + "ĥ": 228, + "Ħ": 229, + "ħ": 230, + "Ĩ": 231, + "ĩ": 232, + "Ī": 233, + "ī": 234, + "Ĭ": 235, + "ĭ": 236, + "Į": 237, + "į": 238, + "İ": 239, + "ı": 240, + "IJ": 241, + "ij": 242, + "Ĵ": 243, + "ĵ": 244, + "Ķ": 245, + "ķ": 246, + "ĸ": 247, + "Ĺ": 248, + "ĺ": 249, + "Ļ": 250, + "ļ": 251, + "Ľ": 252, + "ľ": 253, + "Ŀ": 254, + "ŀ": 255, + "Ł": 256, + "ł": 257, + "Ń": 258, + "Ġt": 259, + "Ġa": 260, + "in": 261, + "he": 262, + "re": 263, + "ï¼": 264, + "ä¸": 265, + "on": 266, + "at": 267, + "çļ": 268, + "çļĦ": 269, + "ï¼Į": 270, + "Ġs": 271, + "Ġc": 272, + "nd": 273, + "ãĢ": 274, + "er": 275, + "Ġthe": 276, + "es": 277, + "en": 278, + "or": 279, + "an": 280, + "Ġand": 281, + "ing": 282, + "Ġp": 283, + "it": 284, + "al": 285, + "ãĢĤ": 286, + "Ġo": 287, + "Ġw": 288, + "ä»": 289, + "Ġto": 290, + "is": 291, + "ou": 292, + "Ġm": 293, + "äº": 294, + "Ġin": 295, + "Ġf": 296, + "Ġb": 297, + "ed": 298, + "ion": 299, + "åı": 300, + "ic": 301, + "Ġd": 302, + "Ġof": 303, + "le": 304, + "ar": 305, + "ro": 306, + "ĠĠ": 307, + "åħ": 308, + "ent": 309, + "æľ": 310, + "Ġe": 311, + "åĴ": 312, + "è¿": 313, + "ä½": 314, + "åĴĮ": 315, + "æĪ": 316, + "å®": 317, + "åĪ": 318, + "ve": 319, + "us": 320, + "Ġre": 321, + "Ġh": 322, + "Ġth": 323, + "as": 324, + "ct": 325, + "çĶ": 326, + "om": 327, + "åľ": 328, + "å¤": 329, + "æĺ": 330, + "åĬ": 331, + "åIJ": 332, + "ä¸Ģ": 333, + "im": 334, + "è¯": 335, + "æĸ": 336, + "ation": 337, + "lo": 338, + "ç»": 339, + "Ġbe": 340, + "ãĢģ": 341, + "id": 342, + "Ġcan": 343, + "il": 344, + "æĺ¯": 345, + "ä¹": 346, + "è®": 347, + 
"ĠA": 348, + "Ġthat": 349, + "ĠT": 350, + "以": 351, + "ch": 352, + "Ġy": 353, + "ce": 354, + "ï¼ļ": 355, + "ot": 356, + "ers": 357, + "Ġn": 358, + "éĢ": 359, + "ra": 360, + "å°": 361, + "Ġg": 362, + "Ġyou": 363, + "åŃ": 364, + "Ġpro": 365, + "et": 366, + "åº": 367, + "åľ¨": 368, + "ly": 369, + "Ġis": 370, + "个": 371, + "Ġl": 372, + "ur": 373, + "Ġfor": 374, + "åı¯": 375, + "éĩ": 376, + "st": 377, + "çļĦæ": 378, + "ut": 379, + "Ġhe": 380, + "if": 381, + "ĥ½": 382, + "ä¼": 383, + "ĠI": 384, + "è¡": 385, + "ir": 386, + "ith": 387, + "å¹": 388, + "Ġare": 389, + "ig": 390, + "Ġst": 391, + "el": 392, + "ol": 393, + "å¸": 394, + "ul": 395, + "æĿ": 396, + "æĪij": 397, + "Ġon": 398, + "è¦": 399, + "æľī": 400, + "æĹ": 401, + "å¯": 402, + "è§": 403, + "è¦ģ": 404, + "Ġus": 405, + "ay": 406, + "æķ": 407, + "çī": 408, + "ow": 409, + "ment": 410, + "ç͍": 411, + "ess": 412, + "ä¸Ń": 413, + "们": 414, + "人": 415, + "åĩ": 416, + "Ġex": 417, + "ĠĠĠĠ": 418, + "åĽ": 419, + "åĮ": 420, + "å¼": 421, + "Ġcon": 422, + "se": 423, + "èĥ½": 424, + "çİ": 425, + "Ġan": 426, + "Ġwith": 427, + "为": 428, + "ate": 429, + "iv": 430, + "am": 431, + "Ġas": 432, + "ure": 433, + "è¿Ļ": 434, + "åĨ": 435, + "çŃ": 436, + "Ġor": 437, + "å·": 438, + "Ġal": 439, + "ies": 440, + "ç§": 441, + "Ġim": 442, + "æĢ": 443, + "ver": 444, + "ab": 445, + "äºĨ": 446, + "Ġsu": 447, + "Ġde": 448, + "ge": 449, + "th": 450, + "åı¯ä»¥": 451, + "èĢ": 452, + "ä¸į": 453, + "å¾": 454, + "ĠAI": 455, + "Ġen": 456, + "éĹ": 457, + "æī": 458, + "ak": 459, + "ive": 460, + "Ġmo": 461, + "å¥": 462, + "éĿ": 463, + "çĽ": 464, + "ity": 465, + "ä¿": 466, + "un": 467, + "è´": 468, + "åį": 469, + "Ġit": 470, + "Ġimp": 471, + "ect": 472, + "æł": 473, + "å½": 474, + "èĩ": 475, + "é¢": 476, + "åĵ": 477, + "æ³": 478, + "ort": 479, + "ad": 480, + "æŀ": 481, + "em": 482, + "Ġcom": 483, + "å¦": 484, + "her": 485, + "ere": 486, + "ĠS": 487, + "ial": 488, + "ĠC": 489, + "ĠThe": 490, + "çIJ": 491, + "çĶŁ": 492, + "æĦ": 493, + "pp": 494, + "æŃ": 495, + 
"æĸ¹": 496, + "qu": 497, + "Ġwh": 498, + "å¦Ĥ": 499, + "éľ": 500, + "ant": 501, + "Ġle": 502, + "Ġv": 503, + "æĭ": 504, + "æĬ": 505, + "ust": 506, + "æĹ¶": 507, + "çŃī": 508, + "åij": 509, + "对": 510, + "ter": 511, + "ld": 512, + "è¡Į": 513, + "Ġch": 514, + "ud": 515, + "éľĢ": 516, + "æ°": 517, + "æĪIJ": 518, + "Ġ|": 519, + "ac": 520, + "ain": 521, + "iz": 522, + "æı": 523, + "ions": 524, + "Ġha": 525, + "æĽ": 526, + "--": 527, + "æĿ¥": 528, + "ome": 529, + "å¿": 530, + "'s": 531, + "Ġne": 532, + "est": 533, + "ä¾": 534, + "um": 535, + "åΰ": 536, + "åľ°": 537, + "ist": 538, + "âĢ": 539, + "çī©": 540, + "ä¸Ģ个": 541, + "lp": 542, + "æİ": 543, + "èĩª": 544, + "Ġhelp": 545, + "Ġtheir": 546, + "æĶ": 547, + "ä½ľ": 548, + "ä¼ļ": 549, + "æĮ": 550, + "æĪij们": 551, + "nt": 552, + "äºİ": 553, + "åĪĨ": 554, + "res": 555, + "pe": 556, + "åĩº": 557, + "ide": 558, + "æĥ": 559, + "ĠH": 560, + "è¾": 561, + "ĠM": 562, + "ff": 563, + "æ¯": 564, + "od": 565, + "ical": 566, + "Ġwor": 567, + "ä¸Ĭ": 568, + "are": 569, + "æĽ´": 570, + "Ġyour": 571, + "ä¸ĭ": 572, + "èµ": 573, + "ations": 574, + "æķ°": 575, + "Ġte": 576, + "åİ": 577, + "çIJĨ": 578, + "ĠTh": 579, + "è¿ĩ": 580, + "å¹¶": 581, + "du": 582, + "éĿ¢": 583, + "Ġad": 584, + "ill": 585, + "æµ": 586, + "好": 587, + "oc": 588, + "act": 589, + "éľĢè¦ģ": 590, + "ä»ĸ": 591, + "å±": 592, + "Ġr": 593, + "Ġmore": 594, + "åѦ": 595, + "ç®": 596, + "igh": 597, + "äºĽ": 598, + "ĠB": 599, + "åĬ¨": 600, + "åĵģ": 601, + "èī": 602, + "ple": 603, + "Ġinc": 604, + "åIJĮ": 605, + "Ġexp": 606, + "ould": 607, + "ä½ł": 608, + "æį": 609, + "æıIJ": 610, + "大": 611, + "çݰ": 612, + "pt": 613, + "ĠP": 614, + "all": 615, + "åĬł": 616, + "ç§į": 617, + "Ġse": 618, + "åĬĽ": 619, + "out": 620, + "Ġhave": 621, + "çº": 622, + "ä½ĵ": 623, + "Ġprov": 624, + "åĮĸ": 625, + "å¤ļ": 626, + "å®ļ": 627, + "Ġused": 628, + "éĢļ": 629, + "cc": 630, + "è¿Ľ": 631, + "æ´": 632, + "Ġsh": 633, + "Ġab": 634, + "os": 635, + "Ġres": 636, + "ĠThis": 637, + "ç¨": 638, + "æĢ§": 639, + 
"age": 640, + "ri": 641, + "æ¸": 642, + "able": 643, + "åŃIJ": 644, + "Ġby": 645, + "åıij": 646, + "éĩı": 647, + "åºĶ": 648, + "Ġlo": 649, + "使": 650, + "åħ¶": 651, + "é«": 652, + "éĻ": 653, + "é«ĺ": 654, + "度": 655, + "è§£": 656, + "é£": 657, + "å°Ĩ": 658, + "æ³ķ": 659, + "and": 660, + "ä¿Ŀ": 661, + "ans": 662, + "for": 663, + "rom": 664, + "reat": 665, + "Ġpl": 666, + "çļĦç": 667, + "常": 668, + "è½": 669, + "Ġwe": 670, + "表": 671, + "ake": 672, + "æĪĸ": 673, + "é¢ĺ": 674, + "åŁ": 675, + "Ġme": 676, + "æĸĩ": 677, + "ther": 678, + "ke": 679, + "å®¶": 680, + "åIJĪ": 681, + "æľĢ": 682, + "ine": 683, + "Ġsome": 684, + "ç±": 685, + "éĩį": 686, + "æŀľ": 687, + "ĠW": 688, + "ĠE": 689, + "éĺ": 690, + "our": 691, + "rou": 692, + "çĤ": 693, + "æ±": 694, + "åħ³": 695, + "Ġint": 696, + "ance": 697, + "ä¹Ł": 698, + "éģ": 699, + "ĠĠĠ": 700, + "å®ĥ": 701, + "ag": 702, + "æ¬": 703, + "00": 704, + "è°": 705, + "ult": 706, + "yst": 707, + "éĹ´": 708, + "ç³": 709, + "Ġtr": 710, + "pl": 711, + "art": 712, + "æĦŁ": 713, + "æĤ": 714, + "ata": 715, + "ĠF": 716, + "form": 717, + "计": 718, + "Ġfrom": 719, + "ĠD": 720, + "éĹ®": 721, + "ight": 722, + "ces": 723, + "æį®": 724, + "lop": 725, + "ä¹ĭ": 726, + "Ġfe": 727, + "åģ": 728, + "velop": 729, + "Ġ1": 730, + "åĽł": 731, + "ks": 732, + "æ²": 733, + "Ġu": 734, + "å°ı": 735, + "ystem": 736, + "Ġdis": 737, + "ĠR": 738, + "gy": 739, + "å·¥": 740, + "ç¨ĭ": 741, + "å¢": 742, + "ence": 743, + "èĤ": 744, + "ç¡": 745, + "Ġtra": 746, + "å»": 747, + "åħ¥": 748, + "ign": 749, + "alth": 750, + "Ġsuch": 751, + "ach": 752, + "æĻ": 753, + "arn": 754, + "Ġdata": 755, + "è¶": 756, + "å®ŀ": 757, + "so": 758, + "Ġdevelop": 759, + "ç¤": 760, + "Ġacc": 761, + "ast": 762, + "èĢĮ": 763, + "Ġ\"": 764, + "Ġother": 765, + "建": 766, + "Ġeff": 767, + "ç«": 768, + "Ġman": 769, + "åħ¬": 770, + "åĢ": 771, + "çĦ": 772, + "ms": 773, + "å¼ı": 774, + "èī²": 775, + "å¾Ĺ": 776, + "ific": 777, + "Ġj": 778, + "Ġro": 779, + "Ġhas": 780, + "chn": 781, + "olo": 782, + "åζ": 783, + 
"èĬ": 784, + "使ç͍": 785, + "ous": 786, + "ual": 787, + "Ġat": 788, + "Ġem": 789, + "ell": 790, + "Ġsystem": 791, + "Ġhealth": 792, + "ities": 793, + "Ġexam": 794, + "ib": 795, + "éĶ": 796, + "Ġabout": 797, + "产": 798, + "åIJİ": 799, + "æĦı": 800, + "ç±»": 801, + "Ġpre": 802, + "æĤ¨": 803, + "Ġalso": 804, + "ents": 805, + "Ġind": 806, + "ind": 807, + "éĢĤ": 808, + "Ġtechn": 809, + "ress": 810, + "æĥħ": 811, + "éĹ®é¢ĺ": 812, + "Ġuse": 813, + "ï¼Ł": 814, + "Ġincl": 815, + "Ġspe": 816, + "ich": 817, + "ps": 818, + "æľº": 819, + "Ġthey": 820, + "ie": 821, + "Ġhow": 822, + "Ġwork": 823, + "ä¸ļ": 824, + "ç´": 825, + "Ġimpro": 826, + "Ġlearn": 827, + "æĸ°": 828, + "çĤ¹": 829, + "Ġcont": 830, + "ard": 831, + "çĦ¶": 832, + "æľ¬": 833, + "ç³»": 834, + "ç¡®": 835, + "设": 836, + "åħ·": 837, + "éĢī": 838, + "èĢħ": 839, + "éħ": 840, + "gh": 841, + "__": 842, + "Ġnot": 843, + "çľ": 844, + "缸": 845, + "Ġprovide": 846, + "åī": 847, + "ional": 848, + "Ġens": 849, + "ä¸İ": 850, + "è´¨": 851, + "ential": 852, + "ç»ı": 853, + "å¿ĥ": 854, + "ang": 855, + "æŃ¤": 856, + "end": 857, + "Ġpo": 858, + "è¿Ľè¡Į": 859, + "ice": 860, + "Ġ-": 861, + "Ġway": 862, + "å·±": 863, + "Ġ2": 864, + "ime": 865, + "ç½": 866, + "èĩªå·±": 867, + "Ġun": 868, + "bot": 869, + "Ġinclud": 870, + "ated": 871, + "æ°´": 872, + "éķ": 873, + "æĮģ": 874, + "代": 875, + "é¡": 876, + "æīĢ": 877, + "çĿ": 878, + "pport": 879, + "ood": 880, + "ike": 881, + "ru": 882, + "Ġcomm": 883, + "ĠL": 884, + "ä¿¡": 885, + "ĠG": 886, + "çŁ": 887, + "ç͵": 888, + "Ġwas": 889, + "low": 890, + "erv": 891, + "åĮħ": 892, + "ĠĠĠĠĠĠĠĠ": 893, + "Ġwhe": 894, + "dit": 895, + "Ġwhich": 896, + "Ġcomp": 897, + "éª": 898, + "ore": 899, + "ç¾": 900, + "Ġ=": 901, + "çī¹": 902, + "iff": 903, + "ert": 904, + "æģ": 905, + "rit": 906, + "Ġrec": 907, + "åĨħ": 908, + "æĺİ": 909, + "ors": 910, + "Ġpat": 911, + "----": 912, + "æŁ": 913, + "Ġapp": 914, + "ns": 915, + "åĬ¡": 916, + "aly": 917, + "ace": 918, + "æ´»": 919, + "ä¾Ľ": 920, + "av": 921, + "主": 922, + 
"Ġpers": 923, + "çĥ": 924, + "该": 925, + "Ġmy": 926, + "ç©": 927, + "eri": 928, + "让": 929, + "æĬĢ": 930, + "éķ¿": 931, + "ack": 932, + "ĠN": 933, + "Ġdiff": 934, + "Ġthis": 935, + "åĿ": 936, + "Ġensure": 937, + "å½ĵ": 938, + "Ġout": 939, + "Ġcl": 940, + "Ġk": 941, + "é¦": 942, + "ount": 943, + "çݯ": 944, + "åĬ©": 945, + "Ġtechnolo": 946, + "Ġthese": 947, + "ful": 948, + "éļ": 949, + "æ·": 950, + "ä¸ĢäºĽ": 951, + "Ġsoc": 952, + "å¼Ģ": 953, + "天": 954, + "Ġev": 955, + "Ġredu": 956, + "Ġthem": 957, + "Ġ(": 958, + "éĥ½": 959, + "æĪ·": 960, + "è·": 961, + "åľº": 962, + "æ°Ķ": 963, + "ĠY": 964, + "è¯Ń": 965, + "éĢļè¿ĩ": 966, + "å±ķ": 967, + "Ġco": 968, + "å½±": 969, + "ç¬": 970, + "Ġanaly": 971, + "æ¯Ķ": 972, + "åħ¨": 973, + "Ġimprove": 974, + "ç»ĵ": 975, + "å¹´": 976, + "çķ": 977, + "çĿĢ": 978, + "Ġhum": 979, + "Ġqu": 980, + "ç®Ĺ": 981, + "ĠO": 982, + "é£Ł": 983, + "ility": 984, + "Ġsystems": 985, + "åıĺ": 986, + "ail": 987, + "ç¼": 988, + "çł": 989, + "è¿Ļ个": 990, + "æıIJä¾Ľ": 991, + "ase": 992, + "åŀ": 993, + "ments": 994, + "Ġpot": 995, + "Ġany": 996, + "ä½Ĩ": 997, + "Ġcons": 998, + "ĠIt": 999, + "æł¼": 1000, + "Ġar": 1001, + "æľ¯": 1002, + "éĿŀ": 1003, + "Ġdo": 1004, + "Ġmay": 1005, + "æĭ©": 1006, + "ue": 1007, + "éĢīæĭ©": 1008, + "ry": 1009, + "éĥ": 1010, + "Ġlike": 1011, + "ong": 1012, + "èģ": 1013, + "``": 1014, + "ile": 1015, + "æ±Ĥ": 1016, + "Ġnew": 1017, + "ient": 1018, + "Ġimpact": 1019, + "è¿ĺ": 1020, + "注": 1021, + "ä¹Ī": 1022, + "缮": 1023, + "âĢľ": 1024, + "âĢĿ": 1025, + "ef": 1026, + "ä¾ĭ": 1027, + "Ġpotential": 1028, + "ok": 1029, + "åı¯èĥ½": 1030, + "Ġtrans": 1031, + "Ġact": 1032, + "ï¼ī": 1033, + "Ġspec": 1034, + "æ¶": 1035, + "Ġwill": 1036, + "交": 1037, + "ize": 1038, + "ç¾İ": 1039, + "å¸Ĥ": 1040, + "Ġstud": 1041, + "pon": 1042, + "èº": 1043, + "ä¸įåIJĮ": 1044, + "one": 1045, + "å¾Ī": 1046, + "åıĬ": 1047, + "å¦Ĥæŀľ": 1048, + "çIJĥ": 1049, + "ange": 1050, + "Ġneed": 1051, + "å¤ĸ": 1052, + "ety": 1053, + "aking": 1054, + "请": 1055, + "ater": 1056, + 
"Ġperson": 1057, + "ident": 1058, + "Ġso": 1059, + "Ġmake": 1060, + "å¹³": 1061, + "å¤Ł": 1062, + "身": 1063, + "ï¼Ī": 1064, + "Ġinform": 1065, + "æ¡": 1066, + "äºĭ": 1067, + "åıĹ": 1068, + "ased": 1069, + "ild": 1070, + "Ġoff": 1071, + "Ġthere": 1072, + "cis": 1073, + "è¢": 1074, + "éĥ¨": 1075, + "æ¯ı": 1076, + "ract": 1077, + "ass": 1078, + "Ġlearning": 1079, + "åĸ": 1080, + "å½¢": 1081, + "ire": 1082, + "ä»İ": 1083, + "bots": 1084, + "èĻ": 1085, + "帮": 1086, + "Ġdes": 1087, + "ĠIn": 1088, + "cess": 1089, + "Ġpe": 1090, + "ify": 1091, + "Ġwho": 1092, + "ä¹ł": 1093, + "æľŁ": 1094, + "Ġexperi": 1095, + "éĤ": 1096, + "Ġsc": 1097, + "ep": 1098, + "ä½ķ": 1099, + "Ġtime": 1100, + "éĿŀ常": 1101, + "æĭ¬": 1102, + "åķ": 1103, + "以ä¸ĭ": 1104, + "éģĵ": 1105, + "Ġcommun": 1106, + "Ġcould": 1107, + "ap": 1108, + "èIJ": 1109, + "è°ĥ": 1110, + "lic": 1111, + "duct": 1112, + "Ġits": 1113, + "cy": 1114, + "说": 1115, + "Ġmed": 1116, + "Ġcol": 1117, + "ular": 1118, + "éĩįè¦ģ": 1119, + "Ġsp": 1120, + "åĪ©": 1121, + "èµ·": 1122, + "Ġprovid": 1123, + "ices": 1124, + "åĻ": 1125, + "æĸĻ": 1126, + "Ġimport": 1127, + "ural": 1128, + "åŃĹ": 1129, + "Ġund": 1130, + "int": 1131, + "Ġover": 1132, + "åı¸": 1133, + "æł¹": 1134, + "é¥": 1135, + "ples": 1136, + "ä»ĸ们": 1137, + "gra": 1138, + "uring": 1139, + "now": 1140, + "åįķ": 1141, + "è¿ĻäºĽ": 1142, + "åīį": 1143, + "å®ī": 1144, + "Ġpr": 1145, + "åĮħæĭ¬": 1146, + "ç»Ļ": 1147, + "The": 1148, + "ä½į": 1149, + "å§": 1150, + "ç´ł": 1151, + "åijĺ": 1152, + "Ġident": 1153, + "åŀĭ": 1154, + "Ġadd": 1155, + "强": 1156, + "æĺ¯ä¸Ģ": 1157, + "ip": 1158, + "gor": 1159, + "Ġsupport": 1160, + "ne": 1161, + "Ġdiffere": 1162, + "åħĥ": 1163, + "Ġass": 1164, + "åĨ³": 1165, + "éĽ": 1166, + "åIJį": 1167, + "Ġgo": 1168, + "Ġtechnology": 1169, + "æĢ»": 1170, + "è®®": 1171, + "Ġinter": 1172, + "Ġinv": 1173, + "Ġour": 1174, + "æķĪ": 1175, + "ustom": 1176, + "Ġrel": 1177, + "ife": 1178, + "åύ": 1179, + "ings": 1180, + "ä»·": 1181, + "Ġpart": 1182, + "被": 1183, + "æīĭ": 
1184, + "ary": 1185, + "Ġrespon": 1186, + "ĊĠĠĠ": 1187, + "好çļĦ": 1188, + "ative": 1189, + "帮åĬ©": 1190, + "绣": 1191, + "æĶ¾": 1192, + "ĠHere": 1193, + "çģ": 1194, + "Ġbut": 1195, + "æģ¯": 1196, + "æŃ£": 1197, + "ark": 1198, + "åħ¬åı¸": 1199, + "ory": 1200, + "å¢ĥ": 1201, + "lect": 1202, + "éŁ": 1203, + "æĥ³": 1204, + "é£İ": 1205, + "ating": 1206, + "Ġam": 1207, + "its": 1208, + "æ»": 1209, + "gorith": 1210, + "åĵį": 1211, + "ures": 1212, + "Ġeffect": 1213, + "Ġshould": 1214, + "Ġper": 1215, + "è±": 1216, + "ç²": 1217, + "ict": 1218, + "Ġalgorith": 1219, + "uc": 1220, + "rough": 1221, + "ä»»": 1222, + "ä»¶": 1223, + "Ġbet": 1224, + "ia": 1225, + "Ġanalyz": 1226, + "æł¹æį®": 1227, + "ized": 1228, + "æµģ": 1229, + "è§Ĥ": 1230, + "è£": 1231, + "æłĩ": 1232, + "iron": 1233, + "Ġcustom": 1234, + "Ġreg": 1235, + "Ġpersonal": 1236, + "èĥ½å¤Ł": 1237, + "ics": 1238, + "ivid": 1239, + "çĪ": 1240, + "èµĦ": 1241, + "æŃ¥": 1242, + "容": 1243, + "åĪĽ": 1244, + "èĪ": 1245, + "ä¹IJ": 1246, + "导": 1247, + "gan": 1248, + "èĬĤ": 1249, + "Ġall": 1250, + "ens": 1251, + "ame": 1252, + "ness": 1253, + "Ġup": 1254, + "ĠU": 1255, + "èĢĥ": 1256, + "elf": 1257, + "å̼": 1258, + "å°ij": 1259, + "æľį": 1260, + "ari": 1261, + "thical": 1262, + "viron": 1263, + "èĥ": 1264, + "ord": 1265, + "Ġsign": 1266, + "éĩĮ": 1267, + "ound": 1268, + "ople": 1269, + "åŁº": 1270, + "Ġinformation": 1271, + "Ġidentify": 1272, + "åĽŀ": 1273, + "Ġcre": 1274, + "éŁ³": 1275, + "ible": 1276, + "ub": 1277, + "è¿IJ": 1278, + "Ġlead": 1279, + "游": 1280, + "次": 1281, + "åĨĻ": 1282, + "éĤ£": 1283, + "get": 1284, + "èį": 1285, + "Ġexample": 1286, + "ä¼ĺ": 1287, + "å½±åĵį": 1288, + "ish": 1289, + "xt": 1290, + "æº": 1291, + "éªĮ": 1292, + "ob": 1293, + "客": 1294, + "å¤ĩ": 1295, + "åģ¥": 1296, + "车": 1297, + "社": 1298, + "ividual": 1299, + "ered": 1300, + "les": 1301, + "Ġenviron": 1302, + "Ġpeople": 1303, + "æĺŁ": 1304, + "çĸ": 1305, + "çĭ": 1306, + "Ġdet": 1307, + "æĹł": 1308, + "Ġif": 1309, + "ose": 1310, + "ite": 1311, + 
"å¢ŀ": 1312, + "éĴ": 1313, + "åIJĮæĹ¶": 1314, + "è¿°": 1315, + "æĸ¹å¼ı": 1316, + "åĽ½": 1317, + "é»": 1318, + "å¤Ħ": 1319, + "Ġexamples": 1320, + "æ®": 1321, + "Ġinto": 1322, + "æĮĩ": 1323, + "Ġhuman": 1324, + "åIJij": 1325, + "示": 1326, + "æķ°æį®": 1327, + "Ġ3": 1328, + "ĠJ": 1329, + "èı": 1330, + "çݯå¢ĥ": 1331, + "als": 1332, + "erst": 1333, + "Ġethical": 1334, + "ç»Ħ": 1335, + "ä¼ł": 1336, + "Ġdifferent": 1337, + "Ġknow": 1338, + "åºı": 1339, + "Ġindividual": 1340, + "æıIJé«ĺ": 1341, + "round": 1342, + "å°±": 1343, + "åıĸ": 1344, + "åŃĺ": 1345, + "两": 1346, + "çŁ¥": 1347, + "ources": 1348, + "ck": 1349, + "å£": 1350, + "ines": 1351, + "è¾¾": 1352, + "Ġmany": 1353, + "æķ´": 1354, + "æł·": 1355, + "ditional": 1356, + "omm": 1357, + "çͱ": 1358, + "éĢł": 1359, + "å®ĥ们": 1360, + "ues": 1361, + "Ġment": 1362, + "Ġimportant": 1363, + "Ġopt": 1364, + "Ġloc": 1365, + "ph": 1366, + "Ġprocess": 1367, + "Ġalgorithms": 1368, + "设计": 1369, + "Ġsocial": 1370, + "very": 1371, + "åĪĻ": 1372, + "ä¾ĭå¦Ĥ": 1373, + "认": 1374, + "Ġaut": 1375, + "Ġserv": 1376, + "gg": 1377, + "产åĵģ": 1378, + "è§Ħ": 1379, + "çľĭ": 1380, + "vel": 1381, + "æĸ¹æ³ķ": 1382, + "Ġben": 1383, + "åĽłæŃ¤": 1384, + "care": 1385, + "per": 1386, + "åĬŁ": 1387, + "建议": 1388, + "Ġpos": 1389, + "æ¤": 1390, + "we": 1391, + "åĮº": 1392, + "iqu": 1393, + "Ġreal": 1394, + "æĹ¥": 1395, + "Ġreduce": 1396, + "af": 1397, + "angu": 1398, + "Ġsk": 1399, + "Ġed": 1400, + "erstand": 1401, + "åĨµ": 1402, + "mot": 1403, + "åħĪ": 1404, + "ç¥": 1405, + "åºĶ该": 1406, + "Ġthrough": 1407, + "Ġconc": 1408, + "åıijå±ķ": 1409, + "è¯ķ": 1410, + "æ¡Ī": 1411, + "Ġenvironment": 1412, + "åı£": 1413, + "Ġadv": 1414, + "åĪ«": 1415, + "Ġbenef": 1416, + "æ¸ħ": 1417, + "åij³": 1418, + "åħī": 1419, + "Ġdevelopment": 1420, + "eng": 1421, + "å¦Ĥä½ķ": 1422, + "管": 1423, + "ivers": 1424, + "åIJĦ": 1425, + "Ġris": 1426, + "row": 1427, + "ergy": 1428, + "计ç®Ĺ": 1429, + "ä¿¡æģ¯": 1430, + "Ġproduct": 1431, + "è¾ĥ": 1432, + "论": 1433, + "èĩªå·±çļĦ": 1434, + 
"æĬ¤": 1435, + "åıį": 1436, + "åħ¶ä»ĸ": 1437, + "åĪĹ": 1438, + "ç»Ĩ": 1439, + "空": 1440, + "Ġgreat": 1441, + "ear": 1442, + "æºIJ": 1443, + "ject": 1444, + "çĶŁæ´»": 1445, + "ä¸ŃçļĦ": 1446, + "Ġunderstand": 1447, + "èĭ": 1448, + "hat": 1449, + "Ġprogra": 1450, + "çĬ": 1451, + "éĩij": 1452, + "Ġincluding": 1453, + "Ġaccess": 1454, + "ĠĠĠĠĠĠĠ": 1455, + "è¯Ĩ": 1456, + "ç¦": 1457, + "og": 1458, + "è£ħ": 1459, + "Ġart": 1460, + "Ġwrit": 1461, + "Ġincre": 1462, + "Ġph": 1463, + "æĸ¹éĿ¢": 1464, + "Ġpract": 1465, + "Ġusing": 1466, + "项": 1467, + "æİ¥": 1468, + "Ġways": 1469, + "Ġlangu": 1470, + "æĶ¯": 1471, + "Ġchall": 1472, + "åİ»": 1473, + "____": 1474, + "imate": 1475, + "æĸŃ": 1476, + "è¨": 1477, + "Ġwell": 1478, + "ll": 1479, + "Ġpol": 1480, + "æĢģ": 1481, + "Ġra": 1482, + "Can": 1483, + "åİŁ": 1484, + "ber": 1485, + "è¨Ģ": 1486, + "ç«ĭ": 1487, + "Ġgen": 1488, + "éħį": 1489, + "æ·±": 1490, + "te": 1491, + "ä¸ī": 1492, + "ç§ij": 1493, + "ĠFor": 1494, + "线": 1495, + "çħ": 1496, + "æ¼": 1497, + "åķĨ": 1498, + "æĿIJ": 1499, + "Ġsignific": 1500, + "Ġgu": 1501, + "Ġdecis": 1502, + "Ġtrain": 1503, + "Ġag": 1504, + "Ġcreat": 1505, + "å®Į": 1506, + "æĹ¶éĹ´": 1507, + "Ġone": 1508, + "èĦ": 1509, + "Ġnat": 1510, + "åŃ¦ä¹ł": 1511, + "çļĦæķ": 1512, + "ced": 1513, + "Ġwhen": 1514, + "Ġbi": 1515, + "èİ": 1516, + "æĽ´åĬł": 1517, + "ives": 1518, + "port": 1519, + "å·¥ä½ľ": 1520, + "ving": 1521, + "Ġbeen": 1522, + "æĻº": 1523, + "Ġlife": 1524, + "å¼ķ": 1525, + "arm": 1526, + "çİĩ": 1527, + "ç͍æĪ·": 1528, + "ä¹ī": 1529, + "份": 1530, + "è¯Ŀ": 1531, + "iness": 1532, + "com": 1533, + "康": 1534, + "åĩı": 1535, + "ä»Ģ": 1536, + "è¾ĵ": 1537, + "Ġvari": 1538, + "con": 1539, + "Ġmod": 1540, + "ä»Ģä¹Ī": 1541, + "Ġenergy": 1542, + "æĬĢæľ¯": 1543, + "ertain": 1544, + "mm": 1545, + "verall": 1546, + "åĪĴ": 1547, + "Ġrobots": 1548, + "Ġorgan": 1549, + "æİ¨": 1550, + "ants": 1551, + "åĩĨ": 1552, + "ds": 1553, + "æŀģ": 1554, + "çĻ": 1555, + "Ġrequ": 1556, + "Ġess": 1557, + "ç®Ģ": 1558, + "ustain": 
1559, + "æ¨": 1560, + "Ġstr": 1561, + "cing": 1562, + "ability": 1563, + "ree": 1564, + "Ġeduc": 1565, + "åİĨ": 1566, + "Ġcreate": 1567, + "åģ¥åº·": 1568, + "Ġdesign": 1569, + "ips": 1570, + "åģļ": 1571, + "èĬ±": 1572, + "ink": 1573, + "èıľ": 1574, + "æī¾": 1575, + "段": 1576, + "æµĭ": 1577, + "ĠV": 1578, + "ĠBy": 1579, + "åĶ": 1580, + "é¦ĸ": 1581, + "è¯į": 1582, + "Ġwhere": 1583, + "Ġdisc": 1584, + "äºĨè§£": 1585, + "ric": 1586, + "ä¸Ķ": 1587, + "è¶³": 1588, + "æĺ¯ä¸Ģ个": 1589, + "arch": 1590, + "积": 1591, + "带": 1592, + "Ġwhile": 1593, + "Ġsignificant": 1594, + "çłģ": 1595, + "æĪ¿": 1596, + "Ġbeing": 1597, + "Ġlanguage": 1598, + "itive": 1599, + "20": 1600, + "Ġanalyze": 1601, + "æĻ¯": 1602, + "èĮ": 1603, + "rib": 1604, + "模": 1605, + "ĠSt": 1606, + "è´¹": 1607, + "'t": 1608, + "Ġhealthcare": 1609, + "Ġexperience": 1610, + "Ġ5": 1611, + "个人": 1612, + "ays": 1613, + "象": 1614, + "plo": 1615, + "Ġwould": 1616, + "èĻij": 1617, + "æĶ¶": 1618, + "é¢Ħ": 1619, + "é¢Ĩ": 1620, + "ä¿ĿæĮģ": 1621, + "ences": 1622, + "åıª": 1623, + "èĩ´": 1624, + "æĪı": 1625, + "Ġmental": 1626, + "Ġfew": 1627, + "ates": 1628, + "è¿ĩç¨ĭ": 1629, + "å®īåħ¨": 1630, + "Ġsustain": 1631, + "Ġwere": 1632, + "太": 1633, + "çĮ": 1634, + "Ġspecific": 1635, + "Ġworld": 1636, + "çŃĶ": 1637, + "```": 1638, + "Ġtake": 1639, + "åħ»": 1640, + "éĢŁ": 1641, + "ever": 1642, + "SS": 1643, + "éĶĢ": 1644, + "Ġbo": 1645, + "hes": 1646, + "Ġmus": 1647, + "æľįåĬ¡": 1648, + "è§Ĵ": 1649, + "ten": 1650, + "æŀIJ": 1651, + "pow": 1652, + "dict": 1653, + "vent": 1654, + "10": 1655, + "çļĦæĹ": 1656, + "ĸçķ": 1657, + "Ġprot": 1658, + "ç½®": 1659, + "Ġhigh": 1660, + "Ġbus": 1661, + "Ġindust": 1662, + "åIJ¦": 1663, + "cial": 1664, + "人们": 1665, + "ĠAs": 1666, + "åijĬ": 1667, + "ade": 1668, + "æĶ¹": 1669, + "çĹ": 1670, + "Ġhad": 1671, + "Ġher": 1672, + "Ġjust": 1673, + "ï¼Ľ": 1674, + "è´Ń": 1675, + "第": 1676, + "éĵ": 1677, + "Ġwater": 1678, + "Ġfood": 1679, + "éĺŁ": 1680, + "aus": 1681, + "Ġchalleng": 1682, + "åħį": 1683, + 
"æĸĩåĮĸ": 1684, + "Ġmost": 1685, + "é¸": 1686, + "ç½ij": 1687, + "缴": 1688, + "Ġsm": 1689, + "Ġactiv": 1690, + "ploy": 1691, + "Overall": 1692, + "å¿«": 1693, + "ruct": 1694, + "Ġindividuals": 1695, + "å§ĭ": 1696, + "gies": 1697, + "æŁ¥": 1698, + "çα": 1699, + "iety": 1700, + "In": 1701, + "åĪĨæŀIJ": 1702, + "è§Ĩ": 1703, + "温": 1704, + "ç»´": 1705, + "olut": 1706, + "åŁŁ": 1707, + "ommend": 1708, + "Ġcomple": 1709, + "æķĻ": 1710, + "Ġbu": 1711, + "Ġeducation": 1712, + "ather": 1713, + "Ġ4": 1714, + "ting": 1715, + "Ġfind": 1716, + "没": 1717, + "Ġhis": 1718, + "ä¹ĭéĹ´": 1719, + "Ġeffective": 1720, + "Ġatt": 1721, + "Ġrese": 1722, + "èĥ½åĬĽ": 1723, + "åŁİ": 1724, + "Ġallow": 1725, + "Ġav": 1726, + "Ġpromot": 1727, + "æĻºèĥ½": 1728, + "满": 1729, + "åħ±": 1730, + "iew": 1731, + "come": 1732, + "ç³»ç»Ł": 1733, + "Ġrespons": 1734, + "äºĴ": 1735, + "Ġcult": 1736, + "powered": 1737, + "Ġrecommend": 1738, + "èIJ¥": 1739, + "OSS": 1740, + "Ġchange": 1741, + "è¯ģ": 1742, + "ved": 1743, + "æİĴ": 1744, + "è§£åĨ³": 1745, + "ici": 1746, + "ĠHow": 1747, + "Ġfeel": 1748, + "æľĪ": 1749, + "Ġwhat": 1750, + "以åıĬ": 1751, + "Ġsee": 1752, + "åŃ©": 1753, + "bs": 1754, + "Ġsur": 1755, + "æ£": 1756, + "ality": 1757, + "Ġvis": 1758, + "ç¡®ä¿Ŀ": 1759, + "pect": 1760, + "å®ŀçݰ": 1761, + "Ġcare": 1762, + "广": 1763, + "ills": 1764, + "åºŃ": 1765, + "ases": 1766, + "å¤į": 1767, + "åºĶç͍": 1768, + "çļĦæĥ": 1769, + "ards": 1770, + "Ġaddress": 1771, + "Ġcompan": 1772, + "Ġinvol": 1773, + "Ġcustomer": 1774, + "åĽłä¸º": 1775, + "Ġstudents": 1776, + "Ġins": 1777, + "注æĦı": 1778, + "æŀĦ": 1779, + "欢": 1780, + "æµ·": 1781, + "åıĤ": 1782, + "èĩªçĦ¶": 1783, + "é©": 1784, + "ĠThese": 1785, + "wn": 1786, + "æĺĵ": 1787, + "çĬ¶": 1788, + "ren": 1789, + "Ġtreat": 1790, + "Ġbenefits": 1791, + "ĊĠĠĠĠĠĠĠ": 1792, + "对äºİ": 1793, + "æĢĿ": 1794, + "ider": 1795, + "ĠYes": 1796, + "ĠK": 1797, + "åĸľ": 1798, + "Ġke": 1799, + "Ġeng": 1800, + "Ġpop": 1801, + "ost": 1802, + "pare": 1803, + "Ġmon": 1804, + "款": 1805, + 
"ĠMOSS": 1806, + "Ġemot": 1807, + "Ġac": 1808, + "ç¼ĸ": 1809, + "fore": 1810, + "åı¥": 1811, + "Ġval": 1812, + "ily": 1813, + "Ġiss": 1814, + "èĤī": 1815, + "èĩ³": 1816, + "游æĪı": 1817, + "ween": 1818, + "Ġinclude": 1819, + "Ġprotect": 1820, + "åħ³ç³»": 1821, + "éĻ©": 1822, + "Ġsever": 1823, + "Ġthan": 1824, + "éľĢæ±Ĥ": 1825, + "ç»ĥ": 1826, + "ĠThey": 1827, + "iss": 1828, + "ys": 1829, + "Ġjob": 1830, + "éĺ³": 1831, + "æIJ": 1832, + "Ġbetween": 1833, + "Ġmach": 1834, + "--------": 1835, + "èĢĥèĻij": 1836, + "è´¨éĩı": 1837, + "Ġbusiness": 1838, + "wor": 1839, + "ick": 1840, + "eg": 1841, + "åħħ": 1842, + "ç¯": 1843, + "æĿ¡": 1844, + "ner": 1845, + "apt": 1846, + "Ġappro": 1847, + "Ġplay": 1848, + "没æľī": 1849, + "¤IJ": 1850, + "æľª": 1851, + "æĪĺ": 1852, + "å®¶åºŃ": 1853, + "ãĢĭ": 1854, + "ency": 1855, + "ĠCh": 1856, + "ãĢĬ": 1857, + "Ġproviding": 1858, + "Ġresources": 1859, + "âĢĻ": 1860, + "Ġassist": 1861, + "Ġnatural": 1862, + "è¯Ħ": 1863, + "便": 1864, + "Ġsaf": 1865, + "åħ·æľī": 1866, + "è°¢": 1867, + "çĥŃ": 1868, + "ss": 1869, + "eth": 1870, + "old": 1871, + "Ġperform": 1872, + "Ġseveral": 1873, + "é¤IJ": 1874, + "Ġeach": 1875, + "转": 1876, + "ci": 1877, + "Ġty": 1878, + "Ġpub": 1879, + "æ´»åĬ¨": 1880, + "ocus": 1881, + "çīĮ": 1882, + "è¶Ĭ": 1883, + "åĽ¢": 1884, + "è½»": 1885, + "è¯Ńè¨Ģ": 1886, + "Ġareas": 1887, + "éĩĩ": 1888, + "ft": 1889, + "riend": 1890, + "å·²": 1891, + "å¸Ĥåľº": 1892, + "ition": 1893, + "ients": 1894, + "管çIJĨ": 1895, + "许": 1896, + "人类": 1897, + "身ä½ĵ": 1898, + "ique": 1899, + "Ġpartic": 1900, + "ç»Ń": 1901, + "agement": 1902, + "ves": 1903, + "符": 1904, + "line": 1905, + "红": 1906, + "åIJ¸": 1907, + "Ġpatter": 1908, + "000": 1909, + "社ä¼ļ": 1910, + "åĨħ容": 1911, + "Ġorganiz": 1912, + "ough": 1913, + "Ġve": 1914, + "åŃ©åŃIJ": 1915, + "æĸ½": 1916, + "æ¤į": 1917, + "åĩł": 1918, + "ä½Ĩæĺ¯": 1919, + "Ġaff": 1920, + "Ġnum": 1921, + "lement": 1922, + "èīº": 1923, + "èij": 1924, + "Ġcar": 1925, + "ages": 1926, + "abor": 1927, + "æĺ¯ä¸Ģç§į": 
1928, + "Ġinst": 1929, + "èĽ": 1930, + "ä¹ĭä¸Ģ": 1931, + "è·¯": 1932, + "åį³": 1933, + "Ġmain": 1934, + "éļı": 1935, + "How": 1936, + "å¿ħ": 1937, + "ç¨ĭåºı": 1938, + "éŁ³ä¹IJ": 1939, + "red": 1940, + "æ²¹": 1941, + "Ġoffer": 1942, + "ets": 1943, + "ç¢": 1944, + "Ġduring": 1945, + "çļĦ人": 1946, + "æĽ´å¤ļ": 1947, + "Ġdi": 1948, + "代çłģ": 1949, + "èİ·": 1950, + "åħĭ": 1951, + "Ġguid": 1952, + "主è¦ģ": 1953, + "Ġfam": 1954, + "æİ§": 1955, + "éĢļ常": 1956, + "ĠAd": 1957, + "å¤ĦçIJĨ": 1958, + "urn": 1959, + "ower": 1960, + "åij½": 1961, + "æıı": 1962, + "Ġskills": 1963, + "Ġtool": 1964, + "ware": 1965, + "æĸĩæľ¬": 1966, + "Ġpatterns": 1967, + "缮æłĩ": 1968, + "acy": 1969, + "æīĵ": 1970, + "åŁİå¸Ĥ": 1971, + "Ġevery": 1972, + "ries": 1973, + "读": 1974, + "éģ¿": 1975, + "çϽ": 1976, + "éĢĤåIJĪ": 1977, + "Ġpatient": 1978, + "羣": 1979, + "oth": 1980, + "她": 1981, + "åĶ®": 1982, + "ä¸Ģç§į": 1983, + "Ġmade": 1984, + "ä½İ": 1985, + "ise": 1986, + "Ġrem": 1987, + "æ¶Ī": 1988, + "åIJ«": 1989, + "air": 1990, + "Ġgener": 1991, + "oy": 1992, + "ç²¾": 1993, + "æĥħåĨµ": 1994, + "ights": 1995, + "Ġexpl": 1996, + "è§ģ": 1997, + "Ġpredict": 1998, + "ç±³": 1999, + "æĽ´å¥½": 2000, + "ä¿®": 2001, + "Ġclimate": 2002, + "Ġfocus": 2003, + "Ġgrow": 2004, + "客æĪ·": 2005, + "ä¸įæĸŃ": 2006, + "itor": 2007, + "ĠEn": 2008, + "约": 2009, + "æĺ¯åIJ¦": 2010, + "ä»ħ": 2011, + "æĪij们çļĦ": 2012, + "æľĽ": 2013, + "op": 2014, + "Ġmaking": 2015, + "yth": 2016, + "ccess": 2017, + "Ġown": 2018, + "ggest": 2019, + "Ġtas": 2020, + "uture": 2021, + "Ġmodel": 2022, + "put": 2023, + "Ġresearch": 2024, + "erest": 2025, + "éļ¾": 2026, + "Ġ[": 2027, + "iel": 2028, + "ational": 2029, + "Ġcommunic": 2030, + "ç¥ŀ": 2031, + "ç©¶": 2032, + "Ġrest": 2033, + "æĪIJ为": 2034, + "king": 2035, + "pr": 2036, + "åĮ»": 2037, + "cur": 2038, + "èĤ²": 2039, + "Ġ'": 2040, + "è¿Ļç§į": 2041, + "ç¯ĩ": 2042, + "Ġche": 2043, + "own": 2044, + "éĻħ": 2045, + "Ġfin": 2046, + "åĪ¶ä½ľ": 2047, + "Ġsuggest": 2048, + "å¢ŀåĬł": 2049, + "Ġmedia": 2050, + 
"ribut": 2051, + "çļĦæĥħ": 2052, + "åĬłåħ¥": 2053, + "Ġcle": 2054, + "åij¨": 2055, + "竳": 2056, + "Ġthink": 2057, + "Ġlocal": 2058, + "pportun": 2059, + "ĠYou": 2060, + "Ġplan": 2061, + "Ġeven": 2062, + "éĽĨ": 2063, + "å·§": 2064, + "ax": 2065, + "Ġchallenges": 2066, + "Ġprof": 2067, + "ĠCan": 2068, + "Ġconcer": 2069, + "Ġfuture": 2070, + "åĬ¿": 2071, + "Ġref": 2072, + "èģĶ": 2073, + "Ġself": 2074, + "æĪĸèĢħ": 2075, + "ble": 2076, + "åĽ´": 2077, + "è¿IJåĬ¨": 2078, + "Ġinf": 2079, + "éĩĬ": 2080, + "Ġsustainable": 2081, + "Ġtext": 2082, + "Ġgra": 2083, + "äºĮ": 2084, + "åĵģçīĮ": 2085, + "ä¸įåIJĮçļĦ": 2086, + "led": 2087, + "çĭ¬": 2088, + "Ġopportun": 2089, + "Ġcontin": 2090, + "ym": 2091, + "Ġget": 2092, + "å¯Ĩ": 2093, + "éϤ": 2094, + "æħ": 2095, + "éģ¿åħį": 2096, + "Ġ+": 2097, + "è§ī": 2098, + "Ġret": 2099, + "å¸ĥ": 2100, + "Ġinterest": 2101, + "Ġsociety": 2102, + "ç»ĵæŀľ": 2103, + "åIJ¬": 2104, + "é¦ĸåħĪ": 2105, + "Ġbre": 2106, + "Ġ20": 2107, + "ĠHowever": 2108, + "è®°": 2109, + "ons": 2110, + "è¿ij": 2111, + "å¼Ģå§ĭ": 2112, + "Ġbuild": 2113, + "Ġbeh": 2114, + "'m": 2115, + "vers": 2116, + "Ġgood": 2117, + "çIJĨè§£": 2118, + "resent": 2119, + "离": 2120, + "åĬŁèĥ½": 2121, + "Ġeffort": 2122, + "labor": 2123, + "é»ij": 2124, + "Ġbetter": 2125, + "Ġread": 2126, + "å¾ĭ": 2127, + "èĽĭ": 2128, + "hed": 2129, + "ä¹°": 2130, + "导èĩ´": 2131, + "Ġimplement": 2132, + "ç¿": 2133, + "享": 2134, + "头": 2135, + "ense": 2136, + "Ġlong": 2137, + "other": 2138, + "饮": 2139, + "åŃĺåľ¨": 2140, + "çļĦæĦ": 2141, + "ä¸Ģ份": 2142, + "ython": 2143, + "ning": 2144, + "åĩıå°ij": 2145, + "åĢĻ": 2146, + "ä¸ĵ": 2147, + "åIJĦç§į": 2148, + "èħ": 2149, + "å°½": 2150, + "åįĩ": 2151, + "æĬ¥": 2152, + "Ġpublic": 2153, + "Ġlar": 2154, + "ä½łçļĦ": 2155, + "aut": 2156, + "é¢ĨåŁŁ": 2157, + "æļ": 2158, + "ollow": 2159, + "èģĮ": 2160, + "Ġchang": 2161, + "Ġbest": 2162, + "hip": 2163, + "åĨį": 2164, + "akes": 2165, + "Ġchat": 2166, + "ited": 2167, + "Ġpower": 2168, + "ä¿ĿæĬ¤": 2169, + "书": 2170, + "计åĪĴ": 
2171, + "éĩįè¦ģçļĦ": 2172, + "åıĺåĮĸ": 2173, + "ilities": 2174, + "Ġconsider": 2175, + "æĪij们åı¯ä»¥": 2176, + "éĤ£ä¹Ī": 2177, + "Ġide": 2178, + "æ¼Ķ": 2179, + "aging": 2180, + "Ġbased": 2181, + "å®Ŀ": 2182, + "Ġrange": 2183, + "Ġresult": 2184, + "Ġmem": 2185, + "çħ§": 2186, + "Ġlevel": 2187, + "cou": 2188, + "Ġbr": 2189, + "Th": 2190, + "ä¼ģ": 2191, + "建ç«ĭ": 2192, + "Ġunique": 2193, + "è®Ń": 2194, + "Ġmark": 2195, + "许å¤ļ": 2196, + "è¡Į为": 2197, + "Ķç©¶": 2198, + "çļĦæĬ": 2199, + "Ġset": 2200, + "骤": 2201, + "ts": 2202, + "Ġhist": 2203, + "Ġaround": 2204, + "Ġrev": 2205, + "åħ¶ä¸Ń": 2206, + "ï¼ģ": 2207, + "æııè¿°": 2208, + "æľĢåIJİ": 2209, + "Ġsim": 2210, + "nect": 2211, + "åĽŀçŃĶ": 2212, + "éĺ²": 2213, + "èī¯": 2214, + "åΰäºĨ": 2215, + "ä¸ĸçķ": 2216, + "æĸ¹æ¡Ī": 2217, + "æĿIJæĸĻ": 2218, + "ä¸ĸçķĮ": 2219, + "æĽ´å¥½åľ°": 2220, + "两个": 2221, + "Ġemploy": 2222, + "Ġtry": 2223, + "æĵ": 2224, + "Ġback": 2225, + "åĪĩ": 2226, + "Ġsuccess": 2227, + "Ġdecisions": 2228, + "Ġthose": 2229, + "å¯Į": 2230, + "Ġfact": 2231, + "æİ¢": 2232, + "è¶£": 2233, + "Ġpractices": 2234, + "åIJĹ": 2235, + "æīį": 2236, + "çİ©": 2237, + "ption": 2238, + "æĸĩ竳": 2239, + "Ġfeat": 2240, + "Ġprevent": 2241, + "Ġwriting": 2242, + "çļĦæĢ": 2243, + "Ġno": 2244, + "ä»ĭ": 2245, + "éŨ": 2246, + "Ġdel": 2247, + "æĴ": 2248, + "Ġoptim": 2249, + "ination": 2250, + "ĠĊ": 2251, + "usion": 2252, + "Ġaccount": 2253, + "ling": 2254, + "Ġdivers": 2255, + ".\"": 2256, + "ath": 2257, + "èĭ±": 2258, + "ä¼ģä¸ļ": 2259, + "Ġgrou": 2260, + "åľ°çIJĥ": 2261, + "失": 2262, + "Ġpersonalized": 2263, + "ĠHe": 2264, + "表达": 2265, + "curity": 2266, + "Ġfollow": 2267, + "产çĶŁ": 2268, + "Ġear": 2269, + "åİĭ": 2270, + "vern": 2271, + "Ġissues": 2272, + "åĿĩ": 2273, + "é²": 2274, + "Ġdr": 2275, + "iving": 2276, + "Ġtraining": 2277, + "Ġrisk": 2278, + "åĩ½": 2279, + "åı²": 2280, + "æij": 2281, + "çļĦæĹ¶": 2282, + "ogn": 2283, + "Ġrequire": 2284, + "Ġenvironmental": 2285, + "back": 2286, + "éĶ®": 2287, + "çĸĹ": 2288, + "Ġinteract": 
2289, + "åĽ¢éĺŁ": 2290, + "æ¯ı个": 2291, + "çĦ¶åIJİ": 2292, + "Ġdist": 2293, + "ç͍äºİ": 2294, + "认为": 2295, + "åĩ½æķ°": 2296, + "Ġsent": 2297, + "ĊĠĠĠĠĠĠĠĠ": 2298, + "Ġreducing": 2299, + "å¹²": 2300, + "Ġrep": 2301, + "Ġcaus": 2302, + "Ġmusic": 2303, + "çª": 2304, + "Ġmonitor": 2305, + "Ġform": 2306, + "é¢ľ": 2307, + "çĹħ": 2308, + "é¦Ļ": 2309, + "Ġoften": 2310, + "åı¯èĥ½ä¼ļ": 2311, + "åijĺå·¥": 2312, + "Ġhand": 2313, + "æĬķ": 2314, + "Ġneeds": 2315, + "æŃ¤å¤ĸ": 2316, + "åıĭ": 2317, + "ivity": 2318, + "Ġactivities": 2319, + "åĸľæ¬¢": 2320, + "Ġpur": 2321, + "ian": 2322, + "self": 2323, + "åĬ¨çī©": 2324, + "comes": 2325, + "å©": 2326, + "Ġpriv": 2327, + "az": 2328, + "Ġrelations": 2329, + "Ġmachine": 2330, + "çļĦæ°": 2331, + "ä»·æł¼": 2332, + "ä»·å̼": 2333, + "ç´¢": 2334, + "Ġfeed": 2335, + "ä¸Ģä¸ĭ": 2336, + "Ġteam": 2337, + "Ġindustry": 2338, + "è´¢": 2339, + "ĠPro": 2340, + "Ġwant": 2341, + "ç§°": 2342, + "Ġclass": 2343, + "Ġlove": 2344, + "åħ³äºİ": 2345, + "è¾ĵåħ¥": 2346, + "Ġtransport": 2347, + "Ġcomplex": 2348, + "Ġyear": 2349, + "éĶĢåĶ®": 2350, + "寻": 2351, + "ience": 2352, + "ists": 2353, + "æĶ¯æĮģ": 2354, + "Ġmind": 2355, + "Ġfun": 2356, + "Ġchar": 2357, + "æĮī": 2358, + "Ġconcerns": 2359, + "conom": 2360, + "ç®Ģåįķ": 2361, + "以ä¸ĭæĺ¯": 2362, + "Ġstart": 2363, + "å¹¶ä¸Ķ": 2364, + "avi": 2365, + "ä¸ŃåĽ½": 2366, + "åħĥç´ł": 2367, + "Ġconf": 2368, + "Ġpositive": 2369, + "Ġcur": 2370, + "Ġcount": 2371, + "ery": 2372, + "å¡": 2373, + "室": 2374, + "Ġcost": 2375, + "Ġequ": 2376, + "Ġpolic": 2377, + "aste": 2378, + "aw": 2379, + "éħĴ": 2380, + "coura": 2381, + "iven": 2382, + "place": 2383, + "chie": 2384, + "çļĦæķ°": 2385, + "åĽłç´ł": 2386, + "Ġfl": 2387, + "ism": 2388, + "Ġmedical": 2389, + "Ġhumans": 2390, + "Ġautom": 2391, + "ertainly": 2392, + "Ġ0": 2393, + "Ġoffers": 2394, + "Ġdetect": 2395, + "Ġ6": 2396, + "é£İæł¼": 2397, + "Ġshow": 2398, + "çģ«": 2399, + "Ġanim": 2400, + "é¢ľèī²": 2401, + "lease": 2402, + "ave": 2403, + "åĵª": 2404, + "ĠThere": 2405, + 
"以ä¸Ĭ": 2406, + "æľªæĿ¥": 2407, + "XX": 2408, + "çīĩ": 2409, + "uch": 2410, + "Ġtasks": 2411, + "åħ·ä½ĵ": 2412, + "æ¤įçī©": 2413, + "Ġmin": 2414, + "èīºæľ¯": 2415, + "icult": 2416, + "Ġexperiences": 2417, + "æİ§åζ": 2418, + "be": 2419, + "Ġpatients": 2420, + "å²": 2421, + "ĠWe": 2422, + "Ġrecogn": 2423, + "çĥ¤": 2424, + "Ġsmall": 2425, + "åĿĹ": 2426, + "åĦ": 2427, + "太éĺ³": 2428, + "ction": 2429, + "Ġent": 2430, + "æį¢": 2431, + "Ġbefore": 2432, + "Ġbecome": 2433, + "å·²ç»ı": 2434, + "表çݰ": 2435, + "Ġexplo": 2436, + "Ġachie": 2437, + "ä»»åĬ¡": 2438, + "大çļĦ": 2439, + "Ġday": 2440, + "Ġfound": 2441, + "å±±": 2442, + "ond": 2443, + "Ġtreatment": 2444, + "pend": 2445, + "hen": 2446, + "Ġcondit": 2447, + "ç¡®å®ļ": 2448, + "Ġbusinesses": 2449, + "ĠWh": 2450, + "æīĢæľī": 2451, + "Ġdeveloped": 2452, + "ç»Ī": 2453, + "æŃ¥éª¤": 2454, + "Ġdifficult": 2455, + "åı·": 2456, + "ĠRe": 2457, + "éĶĻ": 2458, + "Ġcho": 2459, + "Ġquest": 2460, + "Ġtranspare": 2461, + "Ġproject": 2462, + "Ġcommunity": 2463, + "ov": 2464, + "å¸Ī": 2465, + "å¼ł": 2466, + "åĪĨç±»": 2467, + "人çļĦ": 2468, + "sis": 2469, + "çĽĬ": 2470, + "oid": 2471, + "ĠAn": 2472, + "ways": 2473, + "Ġeas": 2474, + "Ġaffect": 2475, + "Ġothers": 2476, + "Ġregul": 2477, + "æĢ§åĴĮ": 2478, + "åĸĦ": 2479, + "agn": 2480, + "ä½ľä¸º": 2481, + "åı¯ä»¥å¸®åĬ©": 2482, + "åĦ¿": 2483, + "Ġorganizations": 2484, + "鸡": 2485, + "åħ´": 2486, + "Ġfriend": 2487, + "Ġ$": 2488, + "Ġdetail": 2489, + "Ġtraditional": 2490, + "Ġdesigned": 2491, + "è´Ńä¹°": 2492, + "ä½ĵéªĮ": 2493, + "ç»į": 2494, + "erm": 2495, + "Ġconnect": 2496, + "è¿Ļæł·": 2497, + "Ġrecommendations": 2498, + "Ġboth": 2499, + "ŁéĢļ": 2500, + "æ¯į": 2501, + "Ġsit": 2502, + "ä½ľç͍": 2503, + "ä»ĭç»į": 2504, + "Ġste": 2505, + "ĠSure": 2506, + "åı°": 2507, + "æĤ¨çļĦ": 2508, + "Ġshe": 2509, + "Ġmanagement": 2510, + "joy": 2511, + "è´Ł": 2512, + "Ġpromote": 2513, + "Ġvarious": 2514, + "(\"": 2515, + "por": 2516, + "Ġsens": 2517, + "Ġessential": 2518, + "gether": 2519, + "ularly": 2520, + 
"äºī": 2521, + "irst": 2522, + "Ġop": 2523, + "Ġspecies": 2524, + "çİ°åľ¨": 2525, + "cho": 2526, + "Ġbehavi": 2527, + "çŃij": 2528, + "女": 2529, + "Ġquality": 2530, + "Ġext": 2531, + "è¥": 2532, + "å®ĮæĪIJ": 2533, + "æĢ»ä¹ĭ": 2534, + "éĥ¨åĪĨ": 2535, + "ä»İèĢĮ": 2536, + "åĽ¾": 2537, + "Ġtyp": 2538, + "Ġstrate": 2539, + "西": 2540, + "Ġhere": 2541, + "ars": 2542, + "å¸Į": 2543, + "çļĦæĿ": 2544, + "å°Ŀ": 2545, + "ee": 2546, + "ier": 2547, + "Ġec": 2548, + "ically": 2549, + "ering": 2550, + "念": 2551, + "ĠDe": 2552, + "Ġneg": 2553, + "建çŃij": 2554, + "Ġservices": 2555, + "Ġable": 2556, + "imes": 2557, + "Ġoptions": 2558, + "缸åħ³": 2559, + "Ġsub": 2560, + "Ġdecision": 2561, + "ĠCertainly": 2562, + "Ġåľ¨": 2563, + "æ¢": 2564, + "Ġservice": 2565, + "):": 2566, + "带æĿ¥": 2567, + "Ġchild": 2568, + "è§£éĩĬ": 2569, + "irt": 2570, + "çĨ": 2571, + "ä¸įä»ħ": 2572, + "æĿ¾": 2573, + "积æŀģ": 2574, + "ron": 2575, + "åı¤": 2576, + "çłĶç©¶": 2577, + "ç²ī": 2578, + "hor": 2579, + "Ġprofess": 2580, + "çļĦéĹ®é¢ĺ": 2581, + "Ġopportunities": 2582, + "åİĨåı²": 2583, + "Ġdef": 2584, + "ĠAm": 2585, + "Ġgr": 2586, + "aur": 2587, + "å±Ĥ": 2588, + "çŃĸ": 2589, + "Ġpopular": 2590, + "æ´ģ": 2591, + "åıijçݰ": 2592, + "Ġpoem": 2593, + "èµĽ": 2594, + "Ġob": 2595, + "Ġdon": 2596, + "Ġsound": 2597, + "Ġtransportation": 2598, + "ious": 2599, + "åı¦": 2600, + "Ġrole": 2601, + "Ġfiel": 2602, + "ç§ijåѦ": 2603, + "èĢģ": 2604, + "reen": 2605, + "æľīæķĪ": 2606, + "Ġcor": 2607, + "Ġfeedback": 2608, + "Ġtechnologies": 2609, + "交éĢļ": 2610, + "Ġadapt": 2611, + "'re": 2612, + "ervation": 2613, + "Ġcommunities": 2614, + "çݰ代": 2615, + "Ġlook": 2616, + "Ġfac": 2617, + "ç͵影": 2618, + "Ġcollect": 2619, + "å¾Ĺåΰ": 2620, + "hips": 2621, + "Ġavail": 2622, + "eren": 2623, + "ä¸Ģèµ·": 2624, + "çīĽ": 2625, + "Ġposs": 2626, + "Ġweather": 2627, + "Ġefforts": 2628, + "¿Ģ": 2629, + "æĹħ": 2630, + "oh": 2631, + "Ġcollabor": 2632, + "æĭ¥": 2633, + "æĪIJåĬŁ": 2634, + "èİ·å¾Ĺ": 2635, + "å±ħ": 2636, + "Ġtre": 2637, + "Ġsources": 
2638, + "Ġstudy": 2639, + "Ġprograms": 2640, + "éĻIJ": 2641, + "Ġtips": 2642, + "Ġmarket": 2643, + "ally": 2644, + "害": 2645, + "wards": 2646, + "æ£Ģ": 2647, + "ä¸Ģç¯ĩ": 2648, + "rior": 2649, + "Ġtop": 2650, + "Ġend": 2651, + "åĭ": 2652, + "Ġlarge": 2653, + "iciency": 2654, + "Ġdec": 2655, + "å®ļçļĦ": 2656, + "icient": 2657, + "è¿ĩç¨ĭä¸Ń": 2658, + "lications": 2659, + "缺": 2660, + "Ġtour": 2661, + "Ġtogether": 2662, + "人工": 2663, + "Ġtools": 2664, + "æĸ¯": 2665, + "æ°ij": 2666, + "æĬĬ": 2667, + "ä¹ĭéĹ´çļĦ": 2668, + "çī¹çĤ¹": 2669, + "Ġbel": 2670, + "ditionally": 2671, + "åĪ©ç͍": 2672, + "è¾¹": 2673, + "éĻį": 2674, + "ĠIf": 2675, + "é¢Ŀ": 2676, + "åįı": 2677, + "å¾Ģ": 2678, + "lish": 2679, + "è¯ī": 2680, + "ins": 2681, + "奶": 2682, + "Ġeconom": 2683, + "Ġinvest": 2684, + "ĠDo": 2685, + "tain": 2686, + "åĩºçݰ": 2687, + "çļĦå½±åĵį": 2688, + "aterial": 2689, + "Ġsure": 2690, + "Ġpass": 2691, + "çĶ»": 2692, + "è´£": 2693, + "ç»ĵæŀĦ": 2694, + "æķħ": 2695, + "æĥħæĦŁ": 2696, + "æ¿Ģ": 2697, + "ellig": 2698, + "ä¼Ĺ": 2699, + "æ¯Ķè¾ĥ": 2700, + "tern": 2701, + "Ġoutcomes": 2702, + "up": 2703, + "Ġbeaut": 2704, + "read": 2705, + "çĶŁæĪIJ": 2706, + "æķ°åŃĹ": 2707, + "Ġdem": 2708, + "ires": 2709, + "åı¯ä»¥éĢļè¿ĩ": 2710, + "æĸ°çļĦ": 2711, + "Ġdeep": 2712, + "å¨": 2713, + "çĭĹ": 2714, + "åħ³æ³¨": 2715, + "çĶŁåij½": 2716, + "ä¼łç»Ł": 2717, + "Ġstay": 2718, + "æŃĮ": 2719, + "åħ³éĶ®": 2720, + "Ġplace": 2721, + "主é¢ĺ": 2722, + "å¾Īå¤ļ": 2723, + "èĪĴ": 2724, + "Ġprofessional": 2725, + "yle": 2726, + "æĽ²": 2727, + "19": 2728, + "Ġessay": 2729, + "Ġgive": 2730, + "ç³ĸ": 2731, + "Ġonly": 2732, + "æŁIJ": 2733, + "Ġphys": 2734, + "对è¯Ŀ": 2735, + "Ġcontro": 2736, + "Ġamount": 2737, + "cept": 2738, + "ization": 2739, + "ç¼ĸåĨĻ": 2740, + "åıĹåΰ": 2741, + "Ġalways": 2742, + "æ¯Ķå¦Ĥ": 2743, + "Ġprivacy": 2744, + "au": 2745, + "________": 2746, + "Ġresponsible": 2747, + "()": 2748, + "çŃīçŃī": 2749, + "Ġmaterial": 2750, + "Ġonline": 2751, + "é¼": 2752, + "æĶ¿": 2753, + "åĽĽ": 2754, + "Ġenjoy": 
2755, + "åľŁ": 2756, + "Ġsafety": 2757, + "Ġtw": 2758, + "Ġcommunication": 2759, + "丽": 2760, + "æĺ¾": 2761, + "olution": 2762, + "erg": 2763, + "įä½ľ": 2764, + "Ġuser": 2765, + "Ġemotional": 2766, + "time": 2767, + "é¾": 2768, + "Ġsecurity": 2769, + "Ġsense": 2770, + "elines": 2771, + "åĬ±": 2772, + "çī©è´¨": 2773, + "ura": 2774, + "Ġshare": 2775, + "Ġanalyzing": 2776, + "ital": 2777, + "é±": 2778, + "irtual": 2779, + "Ġvisit": 2780, + "bers": 2781, + "Ġcour": 2782, + "Ġproble": 2783, + "设å¤ĩ": 2784, + "atch": 2785, + "land": 2786, + "é±¼": 2787, + "æĪij们éľĢè¦ģ": 2788, + "稳": 2789, + "ibility": 2790, + "Ġefficiency": 2791, + "声": 2792, + "èĴ": 2793, + "æľºåύ": 2794, + "Ġclear": 2795, + "åζå®ļ": 2796, + "izing": 2797, + "Ġconditions": 2798, + "lusion": 2799, + "Ġlow": 2800, + "Ġlim": 2801, + "hers": 2802, + "Ġrisks": 2803, + "ç¿»": 2804, + "Ġlet": 2805, + "åĴĸ": 2806, + "å¿ĥçIJĨ": 2807, + "è¿ľ": 2808, + "print": 2809, + "Ġchanges": 2810, + "Ġmeas": 2811, + "Ġimproving": 2812, + "Ġcrit": 2813, + "50": 2814, + "å¸ĮæľĽ": 2815, + "Ġaud": 2816, + "åįĹ": 2817, + "æĹłæ³ķ": 2818, + "Ġnegative": 2819, + "é¡¹çĽ®": 2820, + "und": 2821, + "ats": 2822, + "Ġcompanies": 2823, + "æī¾åΰ": 2824, + "Ġcontribut": 2825, + "æŃ£ç¡®": 2826, + "é»Ħ": 2827, + "å±ŀ": 2828, + "Ġunderstanding": 2829, + "Ġmult": 2830, + "Ġclo": 2831, + "å¾ģ": 2832, + "Ġprior": 2833, + "rim": 2834, + "人工æĻºèĥ½": 2835, + "Ġvariety": 2836, + "Ġtaking": 2837, + "åĤ": 2838, + "aster": 2839, + "ody": 2840, + "Ġ{": 2841, + "çļĦéĩįè¦ģ": 2842, + "Ġfore": 2843, + "èµĦæºIJ": 2844, + "è¦ģæ±Ĥ": 2845, + "Ġfeatures": 2846, + "èįī": 2847, + "me": 2848, + "èĮĥ": 2849, + "Ġoper": 2850, + "级": 2851, + "é²ľ": 2852, + "æĬĢå·§": 2853, + "ijæĪĺ": 2854, + "ç±»åŀĭ": 2855, + "æĿ¿": 2856, + "软": 2857, + "ew": 2858, + "Ġrestaur": 2859, + "Ġwithout": 2860, + "ructure": 2861, + "çļĦæĺ¯": 2862, + "çı": 2863, + "Ġlist": 2864, + "urate": 2865, + "Ġbook": 2866, + "亲": 2867, + "åºĹ": 2868, + "ä¹Łæĺ¯": 2869, + "ä»»ä½ķ": 2870, + "Ġcam": 2871, + 
"ĠBe": 2872, + "Ġgovern": 2873, + "Ġbehavior": 2874, + "è®Ńç»ĥ": 2875, + "Ġfamily": 2876, + "æĿĤ": 2877, + "Ġcity": 2878, + "Ġapproach": 2879, + "Ġaccurate": 2880, + "Ġsom": 2881, + "Ġel": 2882, + "èĪŀ": 2883, + "èŀ": 2884, + "åŁºæľ¬": 2885, + "Ġdise": 2886, + "Ġencoura": 2887, + "ĠWhat": 2888, + "åĥ": 2889, + "详": 2890, + "¦Ĥ": 2891, + "å·¥åħ·": 2892, + "åķ¡": 2893, + "Ġstill": 2894, + "chool": 2895, + "æĦŁåΰ": 2896, + "çĶŁçī©": 2897, + "åĴĸåķ¡": 2898, + "åĩĨå¤ĩ": 2899, + "Ġwaste": 2900, + "Ġevents": 2901, + "æķĻèĤ²": 2902, + "Ġ8": 2903, + "Ġmust": 2904, + "ied": 2905, + "asing": 2906, + "å½¢æĪIJ": 2907, + "Ġproducts": 2908, + "åħ¸": 2909, + "讲": 2910, + "fter": 2911, + "å·®": 2912, + "less": 2913, + "Ġcro": 2914, + "Ġfinan": 2915, + "åıįåºĶ": 2916, + "åĪĽéĢł": 2917, + "Ġguidelines": 2918, + "åΤ": 2919, + "ä½ľåĵģ": 2920, + "表示": 2921, + "å¼Ĥ": 2922, + "Ġknown": 2923, + "Ġtest": 2924, + "误": 2925, + "ope": 2926, + "Ġusers": 2927, + "AI": 2928, + "å¾·": 2929, + "new": 2930, + "追": 2931, + "iques": 2932, + "模åŀĭ": 2933, + "åĬĽåĴĮ": 2934, + "Ġhistory": 2935, + "ĠAl": 2936, + "æĬķèµĦ": 2937, + "å°Ŀè¯ķ": 2938, + "ank": 2939, + "Ġhome": 2940, + "éĴŁ": 2941, + "丰": 2942, + "èĪĴéĢĤ": 2943, + "Ġincrease": 2944, + "Ġhab": 2945, + "åĪ»": 2946, + "è¾ĵåĩº": 2947, + "Ġleading": 2948, + "Ġ7": 2949, + "é£İéĻ©": 2950, + "Ġperformance": 2951, + "Ġhapp": 2952, + "åŃ£": 2953, + "Ġstand": 2954, + "ty": 2955, + "ç¦ı": 2956, + "Ġcustomers": 2957, + "åįİ": 2958, + "Ġbelie": 2959, + "Ġcompany": 2960, + "å½ķ": 2961, + "é£Łçī©": 2962, + "ĠUn": 2963, + "Ġsumm": 2964, + "rent": 2965, + "ĠCon": 2966, + "éĢĤéĩı": 2967, + "anced": 2968, + "Ġi": 2969, + "Ġlight": 2970, + "Ġanalysis": 2971, + "å°Ĭ": 2972, + "ĠUse": 2973, + "ouse": 2974, + "ted": 2975, + "Ġcharact": 2976, + "Ġ#": 2977, + "to": 2978, + "绾": 2979, + "ä¸įæĺ¯": 2980, + "Ġdeveloping": 2981, + "åŁ¹": 2982, + "Ġstrategies": 2983, + "Ġmight": 2984, + "çŁŃ": 2985, + "çļĦæİ": 2986, + "Ġfirst": 2987, + "èĥĮ": 2988, + "çĮ«": 2989, + 
"Ġincludes": 2990, + "åĽŃ": 2991, + "Ġdiagn": 2992, + "Ġgrowth": 2993, + "ä¸ĵä¸ļ": 2994, + "Ġdoes": 2995, + "12": 2996, + "绿": 2997, + "Ġkeep": 2998, + "详ç»Ĩ": 2999, + "åĥı": 3000, + "åıijçĶŁ": 3001, + "fact": 3002, + "åı¯ä»¥åľ¨": 3003, + "ç«Ļ": 3004, + "æĭī": 3005, + "æµİ": 3006, + "Ġchatbots": 3007, + "Ġbreak": 3008, + "è¡¡": 3009, + "çŁ³": 3010, + "æĮģç»Ń": 3011, + "life": 3012, + "Ġ10": 3013, + "æ´Ĺ": 3014, + "ĠAdditionally": 3015, + "士": 3016, + "ember": 3017, + "Ġgoals": 3018, + "å¾®": 3019, + "Ġview": 3020, + "·": 3021, + "ove": 3022, + "åŁºç¡": 3023, + "Ġoptimize": 3024, + "Ġtem": 3025, + "Ġdown": 3026, + "åŁºç¡Ģ": 3027, + "è¶ħ": 3028, + "ercis": 3029, + "Ġless": 3030, + "ees": 3031, + "æĿĥ": 3032, + "Ġkey": 3033, + "Ġworks": 3034, + "讨": 3035, + "åı¥åŃIJ": 3036, + "Ġrobot": 3037, + "uss": 3038, + "åħ¨çIJĥ": 3039, + "ç»ıæµİ": 3040, + "æīįèĥ½": 3041, + "egr": 3042, + "ä»ĸ们çļĦ": 3043, + "äºĶ": 3044, + "èµ·æĿ¥": 3045, + "çĵ": 3046, + "Ġfactors": 3047, + "Ġcultural": 3048, + "æľ¨": 3049, + "Ġworking": 3050, + "ä¼¼": 3051, + "èIJ½": 3052, + "éĢŁåº¦": 3053, + "ä½ı": 3054, + "Ġeffects": 3055, + "å©ļ": 3056, + "br": 3057, + "åİħ": 3058, + "rain": 3059, + "\")": 3060, + "åѦçĶŁ": 3061, + "\",": 3062, + "Ġpar": 3063, + "atform": 3064, + "Ġensuring": 3065, + "çͱäºİ": 3066, + "Ġmuch": 3067, + "Ġwords": 3068, + "Ġmar": 3069, + "ç»ıéªĮ": 3070, + "为äºĨ": 3071, + "åIJĪä½ľ": 3072, + "ven": 3073, + "Ġ/": 3074, + "Ġfinancial": 3075, + "work": 3076, + "ories": 3077, + "æ²»": 3078, + "Ġtechniques": 3079, + "æĭ¥æľī": 3080, + "rap": 3081, + "å°Ķ": 3082, + "Ġest": 3083, + "Ġavailable": 3084, + "Ġlit": 3085, + "æ¹": 3086, + "Ġefficient": 3087, + "els": 3088, + "over": 3089, + "Ġland": 3090, + "Ġarea": 3091, + "Ġintellig": 3092, + "Ġpref": 3093, + "ature": 3094, + "çŁ¥è¯Ĩ": 3095, + "æĵįä½ľ": 3096, + "å¾ħ": 3097, + "igate": 3098, + "çļĦæĶ": 3099, + "Ġmean": 3100, + "bo": 3101, + "Ġcontrol": 3102, + "éĩĩç͍": 3103, + "ricult": 3104, + "Ġprogramm": 3105, + "Ġtowards": 3106, + "thing": 
3107, + "ä¸įè¦ģ": 3108, + "Ġthough": 3109, + "彩": 3110, + "Ġcertain": 3111, + "Ġwild": 3112, + "ä»Ĭ": 3113, + "Ġconservation": 3114, + "çŁ¥éģĵ": 3115, + "Ġreally": 3116, + "çļĦåľ°": 3117, + "io": 3118, + "饰": 3119, + "Ġful": 3120, + "çݯä¿Ŀ": 3121, + "Ġexplore": 3122, + "çļĦæ¸": 3123, + "Ġdiverse": 3124, + "åĬłå¼º": 3125, + "çļ®": 3126, + "Ġemotions": 3127, + "Ġavoid": 3128, + "'ll": 3129, + "çļĦæī": 3130, + "åį¡": 3131, + "Ġplatform": 3132, + "ances": 3133, + "Ġsitu": 3134, + "ä»ĺ": 3135, + "ä½įç½®": 3136, + "oring": 3137, + "çĽIJ": 3138, + "ä¸ĩ": 3139, + "Ġdev": 3140, + "nov": 3141, + "ash": 3142, + "Ġtwo": 3143, + "å®ł": 3144, + "bon": 3145, + "èµ°": 3146, + "åĪĹ表": 3147, + "Ġcy": 3148, + "èįIJ": 3149, + "ĠSome": 3150, + "Ġexplain": 3151, + "Ġaware": 3152, + "社交": 3153, + "day": 3154, + "åıĮ": 3155, + "æ²ŁéĢļ": 3156, + "æ°§": 3157, + "å¼Ģåıij": 3158, + "åħ¬åı¸çļĦ": 3159, + "Ġair": 3160, + "åĩ»": 3161, + "aring": 3162, + "éĥ½æĺ¯": 3163, + "Ġlevels": 3164, + "ods": 3165, + "Ġsteps": 3166, + "Ġcap": 3167, + "æ´ŀ": 3168, + "马": 3169, + "Ġreturn": 3170, + "Ġmet": 3171, + "çĶŁæĢģ": 3172, + "丰å¯Į": 3173, + "æŁĵ": 3174, + "æīĢ以": 3175, + "é¡»": 3176, + "Ġer": 3177, + "Ġfra": 3178, + "30": 3179, + "èĵ": 3180, + "âĢĶ": 3181, + "Ġå½ĵ": 3182, + "ah": 3183, + "ä¿ĥ": 3184, + "Ġlikely": 3185, + "ĠĠĠĠĠĠĠĠĠĠĠĠĠĠĠĠ": 3186, + "åĪĿ": 3187, + "Ġcreating": 3188, + "Ġfarm": 3189, + "Ġbal": 3190, + "Ġlives": 3191, + "å®ĥçļĦ": 3192, + "Ġability": 3193, + "ä¸ĬçļĦ": 3194, + "Ġsentence": 3195, + "åĤ¨": 3196, + "Ġrout": 3197, + "Ġprovides": 3198, + "Ġagain": 3199, + "å®łçī©": 3200, + "éĢIJ": 3201, + "Ġyears": 3202, + "èŀį": 3203, + "Ġphysical": 3204, + "Python": 3205, + "ĠEx": 3206, + "iting": 3207, + "è°ĥæķ´": 3208, + "ç½ij绾": 3209, + "æħ¢": 3210, + "空éĹ´": 3211, + "åĽ°": 3212, + "è±Ĩ": 3213, + "æĽ´å¤ļçļĦ": 3214, + "ĠAr": 3215, + "Ġmaintain": 3216, + "å®ŀéĻħ": 3217, + "Ġtravel": 3218, + "Ġsat": 3219, + "pro": 3220, + "ç͵åŃIJ": 3221, + "æ±½": 3222, + "ex": 3223, + "åģĩ": 3224, + "æIJŃ": 
3225, + "éļıçĿĢ": 3226, + "è¿ĺæľī": 3227, + "礼": 3228, + "ale": 3229, + "Ġconsum": 3230, + "ĊĠ": 3231, + "ncy": 3232, + "Ġquestions": 3233, + "fort": 3234, + "making": 3235, + "Ġdesc": 3236, + "15": 3237, + "Ġinvolves": 3238, + "Ġstress": 3239, + "åŃĹ符": 3240, + "here": 3241, + "Ġimpacts": 3242, + "Ġexercis": 3243, + "åĿļ": 3244, + "ledge": 3245, + "ç§ijæĬĢ": 3246, + "oci": 3247, + "Ġeffectively": 3248, + "æ¶Īè´¹": 3249, + "Ġconclusion": 3250, + "éĺħ": 3251, + "Ġstre": 3252, + "issions": 3253, + "æ·»": 3254, + "It": 3255, + "éĿĻ": 3256, + "Ġvirtual": 3257, + "è¡£": 3258, + "Ġachieve": 3259, + "ource": 3260, + "è¿ŀ": 3261, + "acks": 3262, + "è¡¨æł¼": 3263, + "Ġimportance": 3264, + "èĩªæĪij": 3265, + "These": 3266, + "num": 3267, + "çļĦæł": 3268, + "Ġrelationships": 3269, + "Ġworkers": 3270, + "gical": 3271, + "orpor": 3272, + "erson": 3273, + "åij¢": 3274, + "nds": 3275, + "æİ¨èįIJ": 3276, + "ohn": 3277, + "å¿ħé¡»": 3278, + "容æĺĵ": 3279, + "ĠGo": 3280, + "Ġtell": 3281, + "ĠRes": 3282, + "onom": 3283, + "Ġbec": 3284, + "æ³Ľ": 3285, + "pos": 3286, + "Ġmove": 3287, + "Ġstory": 3288, + "æŃ¢": 3289, + "Ġpriorit": 3290, + "Ġindustries": 3291, + "èľ": 3292, + "Ġpossible": 3293, + "ĠMan": 3294, + "Ġexpress": 3295, + "abilities": 3296, + "Ġintegr": 3297, + "代表": 3298, + "Ġrespond": 3299, + "åĪĨéĴŁ": 3300, + "æľºä¼ļ": 3301, + "Ġthings": 3302, + "交æµģ": 3303, + "Ġmeth": 3304, + "urther": 3305, + "Ġwide": 3306, + "èijĹ": 3307, + "æĪijçļĦ": 3308, + "ĸçķ¥": 3309, + "ides": 3310, + "ething": 3311, + "ĠWhile": 3312, + "pan": 3313, + "çŃĸçķ¥": 3314, + "Ġcent": 3315, + "Ġplease": 3316, + "ology": 3317, + "uracy": 3318, + "循": 3319, + "ward": 3320, + "nce": 3321, + "Ġthen": 3322, + "çªģ": 3323, + "å¥ĩ": 3324, + "Ġblo": 3325, + "ai": 3326, + "æŀĹ": 3327, + "ç®Ĺæ³ķ": 3328, + "综": 3329, + "Ġprint": 3330, + "aces": 3331, + "lu": 3332, + "ªæĸ½": 3333, + "pre": 3334, + "çļĦæĦı": 3335, + "Ġsol": 3336, + "Ġoverall": 3337, + "hold": 3338, + "Ġes": 3339, + "çļĦä¸Ģ": 3340, + "éģĩ": 3341, + 
"Ġpopul": 3342, + "å°ı说": 3343, + "æ³¢": 3344, + "åįģ": 3345, + "ä¹Łåı¯ä»¥": 3346, + "é£Łåĵģ": 3347, + "Ġcontent": 3348, + "å°Ħ": 3349, + "Ġrequires": 3350, + "æ£ĢæŁ¥": 3351, + "ĊĠĠĠĠĠĠĠĠĠĠĠ": 3352, + "Ġgroups": 3353, + "Ġfair": 3354, + "Ġbl": 3355, + "å®ŀéªĮ": 3356, + "æĮīçħ§": 3357, + "osp": 3358, + "str": 3359, + "ä¸įèĥ½": 3360, + "Ġharm": 3361, + "Ġprodu": 3362, + "çļĦæĬĢ": 3363, + "çĩ": 3364, + "tle": 3365, + "Ġanimals": 3366, + "è§Ĵèī²": 3367, + "lev": 3368, + "æ¸IJ": 3369, + "å¤įæĿĤ": 3370, + "Ġdepend": 3371, + "æĮijæĪĺ": 3372, + "åĮħåIJ«": 3373, + "Ġhelps": 3374, + "Ġopen": 3375, + "Ġnet": 3376, + "ĠĠĠĠĠ": 3377, + "Ġstrong": 3378, + "Ġjour": 3379, + "å¹¿æ³Ľ": 3380, + "æķ´ä¸ª": 3381, + "Ġelect": 3382, + "Ġresponse": 3383, + "åįķè¯į": 3384, + "æľĭ": 3385, + "Ġ<": 3386, + "åĮĸåѦ": 3387, + "éĴĪ": 3388, + "Ġquick": 3389, + "ually": 3390, + "Ġsomething": 3391, + "Ġtrack": 3392, + "度åĴĮ": 3393, + "erences": 3394, + "æłij": 3395, + "Ġaccuracy": 3396, + "Ġexc": 3397, + "é£ŀ": 3398, + "Ġfield": 3399, + "寻æī¾": 3400, + "éħ¸": 3401, + "Ġhope": 3402, + "çij": 3403, + "Ġinnov": 3404, + "绪": 3405, + "alk": 3406, + "Ġtypes": 3407, + "Ġdid": 3408, + "åĬª": 3409, + "Ġcall": 3410, + "è¯Ĺ": 3411, + "Ġearly": 3412, + "ĠOne": 3413, + "app": 3414, + "Ġcommon": 3415, + "æľĢç»Ī": 3416, + "Ġcheck": 3417, + "Ġsym": 3418, + "çĤĴ": 3419, + "æĬĢèĥ½": 3420, + "Ġenh": 3421, + "Ġagricult": 3422, + "Ġimm": 3423, + "ç»ĩ": 3424, + "满足": 3425, + "Ġschool": 3426, + "bal": 3427, + "Ġfollowing": 3428, + "based": 3429, + "Ġwebs": 3430, + "Ġculture": 3431, + "ĠCom": 3432, + "way": 3433, + "ä¸Ģå®ļ": 3434, + "åķĨåĵģ": 3435, + "ude": 3436, + "çļĦåıijå±ķ": 3437, + "çĶŁäº§": 3438, + "osystem": 3439, + "Ġplant": 3440, + "åı¶": 3441, + "åIJĥ": 3442, + "ä»ĸçļĦ": 3443, + "der": 3444, + "询": 3445, + "å®¶åħ·": 3446, + "Ġfree": 3447, + "ç§»": 3448, + "æİĮ": 3449, + "Ġbody": 3450, + "Ġpresent": 3451, + "Ġparticularly": 3452, + "Ġchildren": 3453, + "Ġstudent": 3454, + ").": 3455, + "çī¹å¾ģ": 3456, + "èĶ": 3457, 
+ "éĺħ读": 3458, + "æķĪçİĩ": 3459, + "Ġprogram": 3460, + "éħ±": 3461, + "åıĺå¾Ĺ": 3462, + "ix": 3463, + "Ġcome": 3464, + "çļĦæ²": 3465, + "ĠTe": 3466, + "ĠTo": 3467, + "åħ±åIJĮ": 3468, + "Ġemployees": 3469, + "说æĺİ": 3470, + "Ġheart": 3471, + "Ġmot": 3472, + "æľĭåıĭ": 3473, + "eric": 3474, + "è¯ij": 3475, + "Ġcurrent": 3476, + "æĪIJæľ¬": 3477, + "Ġtoo": 3478, + "çݩ家": 3479, + "åĪĽæĸ°": 3480, + "Ġecosystem": 3481, + "常è§ģ": 3482, + "ä¸ĢæŃ¥": 3483, + "Ġpres": 3484, + "Ġmulti": 3485, + "åijĬè¯ī": 3486, + "严": 3487, + "Ġmit": 3488, + "Ġaction": 3489, + "çĨŁ": 3490, + "Ġhabit": 3491, + "åı£æĦŁ": 3492, + "ç®±": 3493, + "Ġuses": 3494, + "å¢ŀ强": 3495, + "ç»Ļåĩº": 3496, + "Ġ9": 3497, + "Ġdep": 3498, + "Ġeconomic": 3499, + "æĢ§çļĦ": 3500, + "18": 3501, + "åĨ°": 3502, + "Ġhelped": 3503, + "åIJ¸å¼ķ": 3504, + "çİĭ": 3505, + "Ġdiagnos": 3506, + "åł": 3507, + "èģĶç³»": 3508, + "群": 3509, + "ç»ĥä¹ł": 3510, + "æĪIJéķ¿": 3511, + "Ġpoint": 3512, + "å®ļæľŁ": 3513, + "åij¼": 3514, + "èį¯": 3515, + "æĿ¯": 3516, + "æ¤Ĵ": 3517, + "æķĪæŀľ": 3518, + "Ġspecial": 3519, + "æ··": 3520, + "åĩłä¸ª": 3521, + "ause": 3522, + "éĨ": 3523, + "æ¯ĶèµĽ": 3524, + "è·Ŀ": 3525, + "What": 3526, + "Ġtimes": 3527, + "icles": 3528, + "Ġ*": 3529, + "ç´§": 3530, + "å¦Ĥæŀľä½ł": 3531, + "çĭ¬çī¹": 3532, + "çģµ": 3533, + "ç¨İ": 3534, + "Ġcarbon": 3535, + "Ġbias": 3536, + "åĬ©äºİ": 3537, + "Ġconst": 3538, + "èĩªçͱ": 3539, + "æĿ¥è¯´": 3540, + "å°±æĺ¯": 3541, + "åį°": 3542, + "Ġmeet": 3543, + "è§ĦåĪĴ": 3544, + "çļĦç¾": 3545, + "èIJ¥åħ»": 3546, + "ators": 3547, + "稳å®ļ": 3548, + "ode": 3549, + "çħ®": 3550, + "Ġassoci": 3551, + "å¿Ĺ": 3552, + "è¡ĮæĺŁ": 3553, + "æĿİ": 3554, + "Ġreview": 3555, + "åĩĢ": 3556, + "ĠRo": 3557, + "Ġknowledge": 3558, + "以便": 3559, + "æµĭè¯ķ": 3560, + "åIJĪéĢĤ": 3561, + "sc": 3562, + "å½¢å¼ı": 3563, + "Ġfriends": 3564, + "Ġnature": 3565, + "Ġcritical": 3566, + "æ´ĭ": 3567, + "Ġafter": 3568, + "erve": 3569, + "Ġrece": 3570, + "çļĦæŃ": 3571, + "汽车": 3572, + "çķĮ": 3573, + "Ġloss": 3574, + 
"Ġapplications": 3575, + "å¤ļç§į": 3576, + "éĶħ": 3577, + "串": 3578, + "Ġinsp": 3579, + "---": 3580, + "ĠSh": 3581, + "Ġvol": 3582, + "lut": 3583, + "oks": 3584, + "sequ": 3585, + "Ġbir": 3586, + "åIJĪçIJĨ": 3587, + "Ġnecess": 3588, + "æĪijæĥ³": 3589, + "çŃīæĸ¹éĿ¢": 3590, + "é¼ĵ": 3591, + "Ġsoft": 3592, + "Ġlive": 3593, + "å°ıæĺİ": 3594, + "ĠInd": 3595, + "Ġbring": 3596, + "æĺ¯æĮĩ": 3597, + "Ġsoil": 3598, + "ilar": 3599, + "举": 3600, + "æĿ¡ä»¶": 3601, + "Ġtri": 3602, + "亮": 3603, + "Ġmom": 3604, + "æı¡": 3605, + "ä¼°": 3606, + "ŀäºī": 3607, + "çĽij": 3608, + "èĤ¤": 3609, + "è´¢åĬ¡": 3610, + "æ·»åĬł": 3611, + "é¥®é£Ł": 3612, + "Ġallowing": 3613, + "åºķ": 3614, + "Ġright": 3615, + "Ġexpert": 3616, + "Ġsupp": 3617, + "Ġinit": 3618, + "çļĦæµ": 3619, + "arget": 3620, + "Ġexpect": 3621, + "Ġ19": 3622, + "Ġmeasures": 3623, + "olutions": 3624, + "just": 3625, + "arc": 3626, + "å°ļ": 3627, + "Ġpractice": 3628, + "æľīåĬ©äºİ": 3629, + "大éĩı": 3630, + "',": 3631, + "iment": 3632, + "Ġcontinue": 3633, + "Ġdiscuss": 3634, + "100": 3635, + "éļľ": 3636, + "çļĦæĦŁ": 3637, + "Ġreflect": 3638, + "itation": 3639, + "åį«": 3640, + "äºĨä¸Ģ": 3641, + "ney": 3642, + "ĠLe": 3643, + "ised": 3644, + "è¶ĭ": 3645, + "äºĨä¸Ģ个": 3646, + "Ġincreasing": 3647, + "çļĦæĮ": 3648, + "Ġstru": 3649, + "æĢ»ç»ĵ": 3650, + "ely": 3651, + "å®ĩ": 3652, + "Ġauthor": 3653, + "表éĿ¢": 3654, + "Ġx": 3655, + "æķħäºĭ": 3656, + "emic": 3657, + "Ġrepresent": 3658, + "ger": 3659, + "Ġincreased": 3660, + "ones": 3661, + "ains": 3662, + "Ġtrained": 3663, + "Ġfish": 3664, + "Ġstate": 3665, + "åĨ·": 3666, + "çĶŁéķ¿": 3667, + "Ġrenew": 3668, + "ording": 3669, + "åĮĹ": 3670, + "æİªæĸ½": 3671, + "平衡": 3672, + "Ġsuccessful": 3673, + "ä¸ĭéĿ¢": 3674, + "Ġactivity": 3675, + "èĮ¶": 3676, + "éĢĤåºĶ": 3677, + "èĦij": 3678, + "æİ¢ç´¢": 3679, + "ffic": 3680, + "ç»ĦæĪIJ": 3681, + "atives": 3682, + "äºļ": 3683, + "Ġscen": 3684, + "æ²Ļ": 3685, + "gress": 3686, + "使å¾Ĺ": 3687, + "æī¿": 3688, + "Ġdiscrim": 3689, + "Ġassistants": 3690, + 
"Ġexist": 3691, + "çķĻ": 3692, + "Ġspace": 3693, + "æľĢè¿ij": 3694, + "Ġideas": 3695, + "éĩĩåıĸ": 3696, + "light": 3697, + "注éĩį": 3698, + "çļĦæĹ¶éĹ´": 3699, + "è¿İ": 3700, + "Ġcomb": 3701, + "éĢĤå½ĵ": 3702, + "Ġyourself": 3703, + "rite": 3704, + "ason": 3705, + "åĮĢ": 3706, + "åı¯ä»¥ä½¿ç͍": 3707, + "åħħ满": 3708, + "Ġvalues": 3709, + "æ½": 3710, + "Ġbiases": 3711, + "ä¿ĥè¿Ľ": 3712, + "åľºæĻ¯": 3713, + "ross": 3714, + "åį³åı¯": 3715, + "Ġcru": 3716, + "Ġnumber": 3717, + "Ġtype": 3718, + "rast": 3719, + "åĩĨç¡®": 3720, + "This": 3721, + "Ġpast": 3722, + "çģ¯": 3723, + "å®ļä¹ī": 3724, + "Ġsolutions": 3725, + "Ġter": 3726, + "ä¿Ŀè¯ģ": 3727, + "èͬ": 3728, + "幸": 3729, + "åī§": 3730, + "åħ´è¶£": 3731, + "åª": 3732, + "ention": 3733, + "avor": 3734, + "Ġscient": 3735, + "åĬªåĬĽ": 3736, + "Ġproviders": 3737, + "Ġpolicies": 3738, + "alu": 3739, + "ĠIm": 3740, + "Ġallows": 3741, + "Ġintelligence": 3742, + "çļĦæĸ¹æ³ķ": 3743, + "è¿Ļæĺ¯": 3744, + "Ġ`": 3745, + "Ġemissions": 3746, + "Ġå°Ĩ": 3747, + "Ġmeaning": 3748, + "Ġstyle": 3749, + "åİŁåĽł": 3750, + "Ġstrugg": 3751, + "çļĦç¾İ": 3752, + "iful": 3753, + "dition": 3754, + "éĥ½æľī": 3755, + "空æ°Ķ": 3756, + "å®ĥ们çļĦ": 3757, + "ä¼ĺåĮĸ": 3758, + "Ġinflu": 3759, + "åŁºäºİ": 3760, + "Ġdetails": 3761, + "Ġtransparency": 3762, + "Ġmess": 3763, + "ĠCl": 3764, + "Ġgame": 3765, + "pri": 3766, + "è¶ĭåĬ¿": 3767, + "å½Ĵ": 3768, + "ç¿»è¯ij": 3769, + "æķ£": 3770, + "By": 3771, + "éŃ": 3772, + "ĠAmeric": 3773, + "Ġproduction": 3774, + "Ġincorpor": 3775, + "æĻļ": 3776, + "Ġinvolve": 3777, + "Ġhot": 3778, + "æĻ®": 3779, + "by": 3780, + "Ġflow": 3781, + "Ġemerg": 3782, + "座": 3783, + "Ġidea": 3784, + "åİĭåĬĽ": 3785, + "éĿĴ": 3786, + "oms": 3787, + "èģĮä¸ļ": 3788, + "Ġreport": 3789, + "Ġpap": 3790, + "Ġtherap": 3791, + "Ġsal": 3792, + "åıĤä¸İ": 3793, + "æĸĩåѦ": 3794, + "æIJŃéħį": 3795, + "oot": 3796, + "),": 3797, + "Ġcr": 3798, + "Ġprocesses": 3799, + "gin": 3800, + "å¹³åı°": 3801, + "å¯Ł": 3802, + "Ġpromoting": 3803, + "æļĸ": 3804, + "akehold": 
3805, + "ç»§": 3806, + "iver": 3807, + "æ¦Ĥ": 3808, + "Ġmodels": 3809, + "Ġdra": 3810, + "èĸ": 3811, + "Ġgroup": 3812, + "è¶³å¤Ł": 3813, + "Ġgreen": 3814, + "Ġhealthy": 3815, + "Ġcomfort": 3816, + "Ġadditional": 3817, + "ä¸Ģ次": 3818, + "é¤IJåİħ": 3819, + "Ġmaterials": 3820, + "Ġmanage": 3821, + "çļĦæ¯": 3822, + "伤": 3823, + "åıĬæĹ¶": 3824, + "Ġglo": 3825, + "Ġstat": 3826, + "å¿«éĢŁ": 3827, + "Ġmonitoring": 3828, + "aily": 3829, + "rand": 3830, + "oice": 3831, + "resh": 3832, + "ç»Ħç»ĩ": 3833, + "Ġunder": 3834, + "Ġnecessary": 3835, + "Ġhelpful": 3836, + "ĠCol": 3837, + "é»ijæ´ŀ": 3838, + "åģļåĩº": 3839, + "Ġcourse": 3840, + "Ġmat": 3841, + "Ġleg": 3842, + "Ġface": 3843, + "令": 3844, + "èī¯å¥½çļĦ": 3845, + "ock": 3846, + "åĮ»çĸĹ": 3847, + "çĽĸ": 3848, + "idence": 3849, + "Ġassociated": 3850, + "Ġprogress": 3851, + "åľĨ": 3852, + "Ġeveryone": 3853, + "ç¼ĵ": 3854, + "ĠEng": 3855, + "word": 3856, + "èĵĿ": 3857, + "天æ°Ķ": 3858, + "Ġactions": 3859, + "ems": 3860, + "ĠPl": 3861, + "å®Ļ": 3862, + "ush": 3863, + "顾": 3864, + "Ġcosts": 3865, + "ator": 3866, + "ç©¿": 3867, + "Ġamounts": 3868, + "èͬèıľ": 3869, + "..": 3870, + "Ġmanner": 3871, + "Ġconsequ": 3872, + "æ°ĶåĢĻ": 3873, + "Ġinsights": 3874, + "being": 3875, + "atory": 3876, + "ener": 3877, + "lex": 3878, + "Ġmeans": 3879, + "Ġcollaboration": 3880, + "Ġperspect": 3881, + "orm": 3882, + "priate": 3883, + "å°Ĭéĩį": 3884, + "Ġtarget": 3885, + "è®°å½ķ": 3886, + "åĢĴ": 3887, + "Ġrenewable": 3888, + "æĦ¿": 3889, + "èĥ½æºIJ": 3890, + "Ġinput": 3891, + "å®ĩå®Ļ": 3892, + "ape": 3893, + "Ġadjust": 3894, + "eries": 3895, + "Ġdire": 3896, + "ä¾Ŀ": 3897, + "ustr": 3898, + "fect": 3899, + "Ġbeautiful": 3900, + "Ġdue": 3901, + "reci": 3902, + "çĮ®": 3903, + "èĥĮæĻ¯": 3904, + "èĤ¡": 3905, + "Ġdam": 3906, + "ik": 3907, + "Ġadvanced": 3908, + "çĽ¸å¯¹": 3909, + "åIJįç§°": 3910, + "Ġshort": 3911, + "Ġobject": 3912, + "è¿ĻéĩĮ": 3913, + "éĢłæĪIJ": 3914, + "èIJ¥éĶĢ": 3915, + "çļĦæĥħæĦŁ": 3916, + "票": 3917, + "Ġcountries": 3918, + "ining": 
3919, + "istic": 3920, + "Ġplans": 3921, + "责任": 3922, + "Ġstakehold": 3923, + "the": 3924, + "Ġassess": 3925, + "æĢĿèĢĥ": 3926, + "ech": 3927, + "æĪIJåijĺ": 3928, + "21": 3929, + "Ġdaily": 3930, + "Ġcomput": 3931, + "çļĦæĥħåĨµ": 3932, + "æıIJåĩº": 3933, + "ĠâĢľ": 3934, + "åªĴ": 3935, + "ä¸Ńå¿ĥ": 3936, + "ished": 3937, + "ĠSe": 3938, + "onomous": 3939, + "ern": 3940, + "ç»´æĬ¤": 3941, + "ames": 3942, + "Ġprioritize": 3943, + "纸": 3944, + "èĤ¥": 3945, + "Ġtemper": 3946, + "æ¸ħæ´ģ": 3947, + "use": 3948, + "污": 3949, + "Ġminim": 3950, + "æĺ¯åľ¨": 3951, + "大å°ı": 3952, + "åĵªäºĽ": 3953, + "Ġappreci": 3954, + "reng": 3955, + "Ġregulations": 3956, + "ĠZ": 3957, + "éĶĻ误": 3958, + "rans": 3959, + "èĢĮä¸Ķ": 3960, + "èά": 3961, + "èij±": 3962, + "èĨ": 3963, + "æ°´å¹³": 3964, + "è´Ńçī©": 3965, + "åŃĹ符串": 3966, + "对æĸ¹": 3967, + "Ġhim": 3968, + "Ġconsequences": 3969, + "å·´": 3970, + "é¼ĵåĬ±": 3971, + "Ġfil": 3972, + "人åijĺ": 3973, + "è·Ŀ离": 3974, + "ĠWhen": 3975, + "çļĦæ°´": 3976, + "çī©çIJĨ": 3977, + "åIJĮæĹ¶ä¹Ł": 3978, + "åľ¨è¿Ļ个": 3979, + "åħ¶æ¬¡": 3980, + ",\"": 3981, + "æ¶²": 3982, + "çĶ·": 3983, + "ival": 3984, + "åı¯ä»¥è®©": 3985, + "æĥ¯": 3986, + "Ġadvance": 3987, + "Ġveh": 3988, + "å¦ĤæŀľæĤ¨": 3989, + "Ġestab": 3990, + "ript": 3991, + "端": 3992, + "ä¸įä¼ļ": 3993, + "Ġtransparent": 3994, + "æķ°éĩı": 3995, + "çĽĺ": 3996, + "Ġspeak": 3997, + "Ġpark": 3998, + "Ġstakeholders": 3999, + "éº": 4000, + "Ġevent": 4001, + "çļĦæķ°æį®": 4002, + "èĩªåĬ¨": 4003, + "ç»ĨèĬĤ": 4004, + "è¯Ħä¼°": 4005, + "润": 4006, + "Ġpreferences": 4007, + "Ġveget": 4008, + "æįŁ": 4009, + "equ": 4010, + "Ġgl": 4011, + "Ġpain": 4012, + "ogra": 4013, + "Ġtraffic": 4014, + "Ġoce": 4015, + "ä¹ĺ": 4016, + "ext": 4017, + "âĢĿï¼Į": 4018, + "Ġanother": 4019, + "å¤ļå°ij": 4020, + "Ġagainst": 4021, + "ç»ıåİĨ": 4022, + "计ç®Ĺæľº": 4023, + "èĢIJ": 4024, + "软件": 4025, + "ĠPre": 4026, + "Ġplants": 4027, + "缸äºĴ": 4028, + "é¢ij": 4029, + "\\_": 4030, + "Ġsame": 4031, + "rug": 4032, + "Ġvalu": 4033, + "Ġocc": 4034, + 
"çļĦç¤": 4035, + "Ġsustainability": 4036, + "ĠShe": 4037, + "de": 4038, + "ote": 4039, + "Ġdig": 4040, + "NA": 4041, + "Ġcrucial": 4042, + "æī§": 4043, + "å±Ģ": 4044, + "æĭŁ": 4045, + "æĭĮ": 4046, + "Ġnon": 4047, + "Ġengaging": 4048, + "Ġintern": 4049, + "LP": 4050, + "温度": 4051, + "æł¸": 4052, + "æĬ¥åijĬ": 4053, + "æĿ¥è¶Ĭ": 4054, + "hood": 4055, + "ä¸ī个": 4056, + "å¦Ĥä¸ĭ": 4057, + "çī©ä½ĵ": 4058, + "force": 4059, + "Ġneeded": 4060, + "Ġimages": 4061, + "Ġbuilding": 4062, + "icious": 4063, + "ĠæĪij": 4064, + "è¶ĬæĿ¥è¶Ĭ": 4065, + "æĶ¾åħ¥": 4066, + "go": 4067, + "éĻįä½İ": 4068, + "å½ĵåľ°": 4069, + "æ¶Īè´¹èĢħ": 4070, + "ç£": 4071, + "iversity": 4072, + "é¢Ħç®Ĺ": 4073, + "icle": 4074, + "æ··åIJĪ": 4075, + "Ġparticip": 4076, + "Ġdishes": 4077, + "Ġthroughout": 4078, + "Ġwithin": 4079, + "åı³": 4080, + "é«ĺçļĦ": 4081, + "Ġphot": 4082, + "Ġtrust": 4083, + "æĦıè¯Ĩ": 4084, + "以确ä¿Ŀ": 4085, + "çĬ¶æĢģ": 4086, + "Ġautomation": 4087, + "11": 4088, + "Ġpost": 4089, + "æīĭæľº": 4090, + "works": 4091, + "éĢı": 4092, + "åºĵ": 4093, + "Ġwind": 4094, + "Ġ==": 4095, + "Ġprocessing": 4096, + "èĮĥåĽ´": 4097, + "æĦıä¹ī": 4098, + "追æ±Ĥ": 4099, + "é": 4100, + "å¾Ħ": 4101, + "éĿł": 4102, + "ä¸ĸ": 4103, + "èϽ": 4104, + "ç«ŀäºī": 4105, + "Ġappropriate": 4106, + "æĽ´å¥½çļĦ": 4107, + "Ġcharacter": 4108, + "cl": 4109, + "ç§ĺ": 4110, + "itude": 4111, + "Ġteac": 4112, + "leep": 4113, + "ĠDevelop": 4114, + "ince": 4115, + "å·¦": 4116, + "ground": 4117, + "è¡Įä¸ļ": 4118, + "éĴĪ对": 4119, + "å¿ħè¦ģ": 4120, + "Ġdeterm": 4121, + "----------------": 4122, + "Ġstreng": 4123, + "do": 4124, + "Ġchallenging": 4125, + "ork": 4126, + "Ġanx": 4127, + "èī²çļĦ": 4128, + "Ġhard": 4129, + "æĺİç¡®": 4130, + "åĪĨ享": 4131, + "æĶ¹åıĺ": 4132, + "ä½³": 4133, + "åıªæľī": 4134, + "å±ķ示": 4135, + "Ġcamp": 4136, + "纳": 4137, + "aj": 4138, + "etic": 4139, + "ument": 4140, + "ä½łåı¯ä»¥": 4141, + "Ġpollut": 4142, + "Ġhig": 4143, + "pping": 4144, + "ead": 4145, + "çĦ¶èĢĮ": 4146, + "第äºĮ": 4147, + "鸣": 4148, + "çī©åĵģ": 4149, + 
"举": 4150, + "Ġencourage": 4151, + "pecial": 4152, + "Ġacross": 4153, + "elves": 4154, + "äºĭä»¶": 4155, + "cle": 4156, + "æ©": 4157, + "åªĴä½ĵ": 4158, + "ners": 4159, + "Ġcal": 4160, + "èϽçĦ¶": 4161, + "åĽº": 4162, + "ä¹łæĥ¯": 4163, + "Ġsafe": 4164, + "èĥ½éĩı": 4165, + "istics": 4166, + "ä¹ĭåīį": 4167, + "Ġissue": 4168, + "å¤ļ个": 4169, + "åĨ³çŃĸ": 4170, + "è¾¾åΰ": 4171, + "æĹ©": 4172, + "ä¸įåı¯": 4173, + "ä¸Ģ缴": 4174, + "å·¨": 4175, + "æĦŁè°¢": 4176, + "ĠNew": 4177, + "ä¸Ģ段": 4178, + "Ġmachines": 4179, + "å°Ĩåħ¶": 4180, + "ç»§ç»Ń": 4181, + "Ġword": 4182, + "çī¹åĪ«": 4183, + "Ġagriculture": 4184, + "æĢİ": 4185, + "éĢIJæ¸IJ": 4186, + "éĵ¾": 4187, + "课": 4188, + "Ġkind": 4189, + "å¢Ļ": 4190, + "谢谢": 4191, + "Ġalgorithm": 4192, + "è£ħ饰": 4193, + "Ġalong": 4194, + "Ġeasy": 4195, + "äºij": 4196, + "è§£åĨ³æĸ¹æ¡Ī": 4197, + "Ġawareness": 4198, + "'ve": 4199, + "æĸ¹åIJij": 4200, + "Ġnever": 4201, + "Ġquickly": 4202, + "Ġrespect": 4203, + "çļĦæĻ": 4204, + "Ġamong": 4205, + "Ġaccountability": 4206, + "Ġlaw": 4207, + "ening": 4208, + "Ġdefin": 4209, + "Ġsurround": 4210, + "éĵģ": 4211, + "Ġpowerful": 4212, + "An": 4213, + "Ġcause": 4214, + "æ¥": 4215, + "æİĮæı¡": 4216, + "è¿ĺæĺ¯": 4217, + "Ġcreative": 4218, + "è¡Ģ": 4219, + "Ġlocated": 4220, + "unning": 4221, + "åľ°åĮº": 4222, + "éĿ¢ç§¯": 4223, + "鼨": 4224, + "Ġnear": 4225, + "Ġiniti": 4226, + "ression": 4227, + "ä¸ĭæĿ¥": 4228, + "25": 4229, + "é©¶": 4230, + "¾çĹħ": 4231, + "ables": 4232, + "æľīè¶£": 4233, + "循çݯ": 4234, + "çŃĶæ¡Ī": 4235, + "çł´": 4236, + "ication": 4237, + "éĻ¢": 4238, + "æ²»çĸĹ": 4239, + "Ġaddition": 4240, + "äºĭæĥħ": 4241, + "Ġbecause": 4242, + "åıĪ": 4243, + "èĤĮ": 4244, + "纪": 4245, + "side": 4246, + "æĭħ": 4247, + "湿": 4248, + "åįĬ": 4249, + "顺": 4250, + "ĠAnd": 4251, + "Ġrestaurant": 4252, + "Ġvide": 4253, + "Ġproblem": 4254, + "azing": 4255, + "Ġmembers": 4256, + "Ġnut": 4257, + "Ġcou": 4258, + "浪": 4259, + "Ġè¿Ļ": 4260, + "Ġhelping": 4261, + "ĠIs": 4262, + "æıIJåįĩ": 4263, + "ĠĠĠĠĠĠ": 4264, + "Ġsho": 
4265, + "Ġrelev": 4266, + "Ġarg": 4267, + "Ġbalance": 4268, + "illed": 4269, + "æĺ¯ä»Ģä¹Ī": 4270, + "åĬĽéĩı": 4271, + "ired": 4272, + "å¤ľ": 4273, + "åı¯æĮģç»Ń": 4274, + "Ġperfect": 4275, + "**": 4276, + "ification": 4277, + "æ¶ī": 4278, + "Ġwildlife": 4279, + "ane": 4280, + "Ġrelated": 4281, + "室åĨħ": 4282, + "åºľ": 4283, + "享åıĹ": 4284, + "ours": 4285, + "è·ij": 4286, + "åķĨä¸ļ": 4287, + "aching": 4288, + "Ġsun": 4289, + "Ġrecognition": 4290, + "elt": 4291, + "Ġorder": 4292, + "å¹³åĿĩ": 4293, + "ging": 4294, + "临": 4295, + "çĤ¼": 4296, + "Ġgoing": 4297, + "åij¼åIJ¸": 4298, + "Ġsoftware": 4299, + "Ġremot": 4300, + "èijĹåIJį": 4301, + "幸ç¦ı": 4302, + "Ġenhance": 4303, + "èĻļ": 4304, + "Ġnow": 4305, + "Ġthreat": 4306, + "Ġdest": 4307, + "åĿĩåĮĢ": 4308, + "Ġacad": 4309, + "åºĶ对": 4310, + "çľĭåΰ": 4311, + "cast": 4312, + "è¾Ĩ": 4313, + "ificial": 4314, + "Ġvery": 4315, + "ook": 4316, + "åĮºåŁŁ": 4317, + "¹ģ": 4318, + "æĪ¿éĹ´": 4319, + "æıIJä¾ĽäºĨ": 4320, + "Ġmotiv": 4321, + "Ġaccessible": 4322, + "åĨ³å®ļ": 4323, + "Ġhy": 4324, + "å®Ī": 4325, + "Ġflo": 4326, + "ug": 4327, + "Ġinformed": 4328, + "åĵģè´¨": 4329, + "çļĦçŁ": 4330, + "aves": 4331, + "arr": 4332, + "ĠWith": 4333, + "let": 4334, + "è§ĤçĤ¹": 4335, + "enge": 4336, + "è¡ĮåĬ¨": 4337, + "friend": 4338, + "ç³ķ": 4339, + "Ġfurther": 4340, + "ĠEns": 4341, + "ç§ģ": 4342, + "Ġado": 4343, + "Ġclean": 4344, + "缸åºĶ": 4345, + "Ġfre": 4346, + "pecially": 4347, + "èĹ": 4348, + "Ġcapt": 4349, + "çļĦçľ": 4350, + "Ġsomeone": 4351, + "Ġcell": 4352, + "æĶ¾åľ¨": 4353, + "欢è¿İ": 4354, + "ĠâĢ": 4355, + "Ġdevices": 4356, + "çļĦæĸ¹å¼ı": 4357, + "Ġjobs": 4358, + "augh": 4359, + "not": 4360, + "æľīäºĽ": 4361, + "åħ¬åħ±": 4362, + "gest": 4363, + "çļĦçĶŁæ´»": 4364, + "çľ¼": 4365, + "çļĦä¿¡æģ¯": 4366, + "ĠCons": 4367, + "æİĴåºı": 4368, + "Ġbenefit": 4369, + "rect": 4370, + "å¤ı": 4371, + "unte": 4372, + "符åIJĪ": 4373, + "ä¸Ģä½į": 4374, + "åĨħéĥ¨": 4375, + "Ġlooking": 4376, + "ding": 4377, + "æĬĺ": 4378, + "è¾ij": 4379, + "è¿Ļ个éĹ®é¢ĺ": 
4380, + "Ġespecially": 4381, + "çľł": 4382, + "âĢĿãĢĤ": 4383, + "å¥ı": 4384, + "ray": 4385, + "è¿ĺåı¯ä»¥": 4386, + "åĪĽä½ľ": 4387, + "coming": 4388, + "Ġmultiple": 4389, + "éļIJ": 4390, + "泡": 4391, + "æłĩåĩĨ": 4392, + "Ġmil": 4393, + "éľĢè¦ģ注æĦı": 4394, + "Ġanxiety": 4395, + "æĶ¹è¿Ľ": 4396, + "å±ĭ": 4397, + "污æŁĵ": 4398, + "ç¼ĸç¨ĭ": 4399, + "è´¹ç͍": 4400, + "Ġevalu": 4401, + "imately": 4402, + "Ġliter": 4403, + "ograph": 4404, + "Ġsearch": 4405, + "16": 4406, + "enced": 4407, + "Ġmethods": 4408, + "çĥĪ": 4409, + "模å¼ı": 4410, + "çĬ¶åĨµ": 4411, + "æĶ¹åĸĦ": 4412, + "å¤ļæł·": 4413, + "cer": 4414, + "å¥ĸ": 4415, + "Ġsatis": 4416, + "Ġwebsite": 4417, + "åĬŀ": 4418, + "åģ¥èº«": 4419, + "Ġglobal": 4420, + "Ġask": 4421, + "Ġplatforms": 4422, + "Ġdiseases": 4423, + "çݰ象": 4424, + "tics": 4425, + "æ±ģ": 4426, + "åΤæĸŃ": 4427, + "Ġconvers": 4428, + "Ġrelationship": 4429, + "设置": 4430, + "æ³ķå¾ĭ": 4431, + "Ġmindful": 4432, + "é¢Ħæµĭ": 4433, + "overy": 4434, + "åģľ": 4435, + "ç͵è§Ĩ": 4436, + "è§ĦåĪĻ": 4437, + "aken": 4438, + "Ġimplementing": 4439, + "ising": 4440, + "åıĤåĬł": 4441, + "æĥħ绪": 4442, + "Ġprovided": 4443, + "æ·±åħ¥": 4444, + "Ġprogrammed": 4445, + "Ġrelevant": 4446, + "çļĦçĥ": 4447, + "çĸ¾çĹħ": 4448, + "åĮ»çĶŁ": 4449, + "åĪĽå»º": 4450, + "Ġgenerate": 4451, + "æĶ¶åħ¥": 4452, + "ä¼ij": 4453, + "izes": 4454, + "Ġtransform": 4455, + "éģµ": 4456, + "astic": 4457, + "åijĪ": 4458, + "æ¯ı个人": 4459, + "è¿Ķ": 4460, + "iet": 4461, + "Ġvoice": 4462, + "éĢĶ": 4463, + "æĶ¾æĿ¾": 4464, + "åį´": 4465, + "èĥľ": 4466, + "Ġstructure": 4467, + "æĹ¶å°ļ": 4468, + "ĠQ": 4469, + "Ġelse": 4470, + "duc": 4471, + "Ġemp": 4472, + "èģļ": 4473, + "è´§": 4474, + "aches": 4475, + "ç§Ģ": 4476, + "anks": 4477, + "Ġnight": 4478, + "Ġprofessionals": 4479, + "Ġbas": 4480, + "è´µ": 4481, + "ec": 4482, + "Ġdiversity": 4483, + "ites": 4484, + "dr": 4485, + "åĽ°éļ¾": 4486, + "ĥåľ": 4487, + "åŀĥåľ": 4488, + "åŀĥåľ¾": 4489, + "Ġdrug": 4490, + "碳": 4491, + "Ġname": 4492, + "åĮĸçļĦ": 4493, + "aid": 4494, + 
"æľĢ大": 4495, + "æijĦ": 4496, + "ç®ĢåįķçļĦ": 4497, + "Ġwarm": 4498, + "Ġdone": 4499, + "Ġfunction": 4500, + "asc": 4501, + "强è°ĥ": 4502, + "Ġdemand": 4503, + "Ġvisual": 4504, + "Ġupd": 4505, + "æŃ£åľ¨": 4506, + "Ġsimilar": 4507, + "éĢĴ": 4508, + "æ¯Ľ": 4509, + "éĶ»": 4510, + "ently": 4511, + "Ġvaluable": 4512, + "Ġdisaster": 4513, + "ä¸Ģèά": 4514, + "æ´²": 4515, + "ĠReg": 4516, + "Ġdiscrimination": 4517, + "åĨĻä¸Ģç¯ĩ": 4518, + "Ġgovernment": 4519, + "Ġ好çļĦ": 4520, + "500": 4521, + "lying": 4522, + "Ġprev": 4523, + "Ġprepare": 4524, + "Ġproblems": 4525, + "è·³": 4526, + "Ġprom": 4527, + "åĨ²": 4528, + "å®īè£ħ": 4529, + "éĶ»çĤ¼": 4530, + "æµĵ": 4531, + "è¹": 4532, + "åºĶç͍ç¨ĭåºı": 4533, + "ng": 4534, + "Ġcompet": 4535, + "åĪĨåĪ«": 4536, + "ological": 4537, + "审": 4538, + "Ġtransl": 4539, + "Ġdirect": 4540, + "åīĤ": 4541, + "Ġsuggestions": 4542, + "Ġpaper": 4543, + "Ġrecognize": 4544, + "ton": 4545, + "Ġmitigate": 4546, + "讨论": 4547, + "äºĴåĬ¨": 4548, + "ĠEar": 4549, + "Ġamazing": 4550, + "cre": 4551, + "é¦Ī": 4552, + "Ġinvolved": 4553, + "face": 4554, + "æľīåħ³": 4555, + "))": 4556, + "Ġexce": 4557, + "Ġproductivity": 4558, + "èŃ": 4559, + "é¦Ĩ": 4560, + "Ġsounds": 4561, + "Ġidentifying": 4562, + "],": 4563, + "é¾Ļ": 4564, + "Ġfit": 4565, + "Ġcontribute": 4566, + "ths": 4567, + "friendly": 4568, + "ele": 4569, + "ified": 4570, + "iveness": 4571, + "itely": 4572, + "ĠX": 4573, + "Ġled": 4574, + "åĿı": 4575, + "Ġhistor": 4576, + "Ġdat": 4577, + "Ġjourney": 4578, + "Ġ}": 4579, + "Ġselect": 4580, + "漫": 4581, + "Ġconduct": 4582, + "è¿Ľä¸ĢæŃ¥": 4583, + "ç»ĻæĪij": 4584, + "Ġlif": 4585, + "è£ħä¿®": 4586, + "为ä»Ģä¹Ī": 4587, + "京": 4588, + "Ġnav": 4589, + "Ġwhole": 4590, + "ç¹ģ": 4591, + "åĨľ": 4592, + "æĶ»": 4593, + "Ġbreat": 4594, + "Ġmiss": 4595, + "é¾Ħ": 4596, + "tt": 4597, + "sw": 4598, + "Ġbar": 4599, + "请éĹ®": 4600, + "èģĶç½ij": 4601, + "Ġattract": 4602, + "æĤ¨åı¯ä»¥": 4603, + "One": 4604, + "åħħåĪĨ": 4605, + "ring": 4606, + "Ġå½ĵçĦ¶": 4607, + "ream": 4608, + "Ġevol": 
4609, + "Ġsn": 4610, + "ĠEm": 4611, + "mosp": 4612, + "Ġchoose": 4613, + "view": 4614, + "Ġarr": 4615, + "Ġsleep": 4616, + "ended": 4617, + "æŀ¶": 4618, + "Ġvehicles": 4619, + "Ġfresh": 4620, + "Ġorganization": 4621, + "è¿Ļ段": 4622, + "汤": 4623, + "ĠInt": 4624, + "Ġcontext": 4625, + "åı¦å¤ĸ": 4626, + "Ġocean": 4627, + "æĦŁåıĹ": 4628, + "Ġpollution": 4629, + "urb": 4630, + "æī§è¡Į": 4631, + "ersonal": 4632, + "ĠHealth": 4633, + "ä¼ĺçĤ¹": 4634, + "Ġattention": 4635, + "æľīçĿĢ": 4636, + "é£ŁæĿIJ": 4637, + "Ġerr": 4638, + "çļĦæĿ¥": 4639, + "çļĦçĪ": 4640, + "èѦ": 4641, + "è·Ł": 4642, + "æĹħè¡Į": 4643, + "èĴľ": 4644, + "çļĦæĢĿ": 4645, + "Ġchatbot": 4646, + "çļĦéľĢæ±Ĥ": 4647, + "çķ¥": 4648, + "Ġfeeling": 4649, + "Ġimplemented": 4650, + "社åĮº": 4651, + "çļĦ建议": 4652, + "æIJħ": 4653, + "éĹ»": 4654, + "åıįé¦Ī": 4655, + "缴æİ¥": 4656, + "æĺ¥": 4657, + "itable": 4658, + "æĪijä¼ļ": 4659, + "åį±": 4660, + "èī¯å¥½": 4661, + "Ġliving": 4662, + "åıĺéĩı": 4663, + "ĠBut": 4664, + "Ġcomplete": 4665, + "Ġtrends": 4666, + "Ġmakes": 4667, + "ä»Ĭ天": 4668, + "Ġdistribut": 4669, + "Ġcommit": 4670, + "Ġatmosp": 4671, + "ä¼´": 4672, + "Ġsensors": 4673, + "Ġsw": 4674, + "æĹłè®º": 4675, + "omen": 4676, + "æĶ¿åºľ": 4677, + "Ġchallenge": 4678, + "Ġturn": 4679, + "çIJĨ论": 4680, + "par": 4681, + "Ġwrite": 4682, + "ç»ıåħ¸": 4683, + "emember": 4684, + "é¥Ń": 4685, + "æĸ¹ä¾¿": 4686, + "Ġcu": 4687, + "Ġvalue": 4688, + "Ġfund": 4689, + "pose": 4690, + "è°ĥæŁ¥": 4691, + "çĿ¡": 4692, + "Ġcommunicate": 4693, + "Ġdisease": 4694, + "Ġresearc": 4695, + "Ġlack": 4696, + "arning": 4697, + "ĠPark": 4698, + "çĦ¦": 4699, + "é«ĺ度": 4700, + "Ġrather": 4701, + "宣": 4702, + "çζ": 4703, + "éĺ¶": 4704, + "订": 4705, + "çĥ§": 4706, + "Ġhigher": 4707, + "Ġsummary": 4708, + "ĠAut": 4709, + "çļĦæ³": 4710, + "Ġele": 4711, + "isms": 4712, + "Ġreli": 4713, + "ä¹Łä¼ļ": 4714, + "fra": 4715, + "åijĬè¯īæĪij": 4716, + "æĬ½": 4717, + "Ġsituations": 4718, + "Ġmarine": 4719, + "æĥ³è¦ģ": 4720, + "inci": 4721, + "inal": 4722, + "Ġgain": 
4723, + "Ġdifference": 4724, + "æľºåĻ¨äºº": 4725, + "æµģç¨ĭ": 4726, + "ĠChat": 4727, + "ç½ijç«Ļ": 4728, + "æľ«": 4729, + "Ġcolor": 4730, + "Ġaspect": 4731, + "ç½Ĺ": 4732, + "ĠEduc": 4733, + "Ġdeploy": 4734, + "Ġbeauty": 4735, + "æĤ£": 4736, + "ruction": 4737, + "itut": 4738, + "æĿŁ": 4739, + "让æĪij们": 4740, + "éķ¿åº¦": 4741, + "ules": 4742, + "æ¶īåıĬ": 4743, + "Ġdigital": 4744, + "Ġexisting": 4745, + "ĠOr": 4746, + "\\_\\_": 4747, + "Ġbackground": 4748, + "çĹĩ": 4749, + "æ¯ı天": 4750, + "python": 4751, + "Ġfarmers": 4752, + "Ġcontinu": 4753, + "\":": 4754, + "Ġgiven": 4755, + "å°ıæĹ¶": 4756, + "Ġmoment": 4757, + "200": 4758, + "John": 4759, + "éĿ¢å¯¹": 4760, + "Ġintro": 4761, + "Ġtherapy": 4762, + "è¿ĶåĽŀ": 4763, + "å¹¶åľ¨": 4764, + "Ġz": 4765, + "Ġafford": 4766, + "ä¸Ŀ": 4767, + "宽": 4768, + "ĠÃ": 4769, + "ĠNational": 4770, + "èĥ¡": 4771, + "Ġexercise": 4772, + "æIJħæĭĮ": 4773, + "æĶ¯ä»ĺ": 4774, + "éĺ³åħī": 4775, + "è¯ļ": 4776, + "Ġsect": 4777, + "ĠSu": 4778, + "å¢ŀéķ¿": 4779, + "ç¾İ丽": 4780, + "Ġwa": 4781, + "以ä¸ĭæĺ¯ä¸ĢäºĽ": 4782, + "èĽĭç³ķ": 4783, + "Ġill": 4784, + "æ¸ħæĻ": 4785, + "etry": 4786, + "梦": 4787, + "ç¾İåĽ½": 4788, + "ä»į": 4789, + "oney": 4790, + "Ġecosystems": 4791, + "æĮĩ导": 4792, + "def": 4793, + "99": 4794, + "æŁĶ": 4795, + "pped": 4796, + "Ġlimit": 4797, + "çİī": 4798, + "Ġacademic": 4799, + "Ġrestaurants": 4800, + "Ġhead": 4801, + "ä¿¡ä»»": 4802, + "asters": 4803, + "å²ģ": 4804, + "akers": 4805, + "14": 4806, + "As": 4807, + "æł¡": 4808, + "é«ĺæķĪ": 4809, + "phas": 4810, + "yn": 4811, + "ç¨ĭ度": 4812, + "è¾£": 4813, + "ä¸ĬéĿ¢": 4814, + "å®¶å±ħ": 4815, + "term": 4816, + "ç¾İé£Ł": 4817, + "Ġovers": 4818, + "å®ĺ": 4819, + "Ġindic": 4820, + "ĠYour": 4821, + "St": 4822, + "形象": 4823, + "è´¡": 4824, + "åºĬ": 4825, + "ĠSc": 4826, + "agra": 4827, + "羣æŃ£": 4828, + "oint": 4829, + "ids": 4830, + "arent": 4831, + "éĵ¶": 4832, + "èģĬ": 4833, + "Ġregular": 4834, + "ä¼ĺç§Ģ": 4835, + "Ġcolle": 4836, + "çĸij": 4837, + "Ġsubject": 4838, + "Ġgreater": 4839, + 
"Ġstore": 4840, + "åŁ¹è®Ń": 4841, + "Ġimag": 4842, + "Ġansw": 4843, + "ä½Ļ": 4844, + "Ġspot": 4845, + "åĪĨåŃIJ": 4846, + "Ġaudience": 4847, + "pet": 4848, + "Ġvers": 4849, + "Ġtrail": 4850, + "åĭĩ": 4851, + "erous": 4852, + "Ġguidance": 4853, + "Ġspeech": 4854, + "åĵ²": 4855, + "æĺ¯çͱ": 4856, + "è´¡çĮ®": 4857, + "åIJĪéĢĤçļĦ": 4858, + "设æĸ½": 4859, + "ä»ĸ人": 4860, + "ensive": 4861, + "å̾": 4862, + "aling": 4863, + "Ġprojects": 4864, + "å³": 4865, + "Ġtakes": 4866, + "绩": 4867, + "That": 4868, + "Ġbro": 4869, + "ived": 4870, + "Ġ&": 4871, + "åĿIJ": 4872, + "placement": 4873, + "è¿ŀæİ¥": 4874, + "çļĦ社": 4875, + "ĠTra": 4876, + "Ġrelax": 4877, + "ufact": 4878, + "éģį": 4879, + "Ġsurv": 4880, + "åı£åij³": 4881, + "Ġcreativity": 4882, + "of": 4883, + "å¨ģ": 4884, + "çļĦçł": 4885, + "Ġbreath": 4886, + "Ġplaces": 4887, + "Ġdescrib": 4888, + "èĭ±è¯Ń": 4889, + "Ġdamage": 4890, + "oration": 4891, + "为æĤ¨": 4892, + "ift": 4893, + "Ġcase": 4894, + "å¹´é¾Ħ": 4895, + "Ġpress": 4896, + "çĶľ": 4897, + "éĩİ": 4898, + "æĹħ游": 4899, + "Ġtaken": 4900, + "ined": 4901, + "Ġconcept": 4902, + "æĴŃ": 4903, + "Ġinteresting": 4904, + "è·µ": 4905, + "Ġsea": 4906, + "60": 4907, + "Ġfoot": 4908, + "ĠName": 4909, + "Ġresearchers": 4910, + "éĢģ": 4911, + "Ġwee": 4912, + ");": 4913, + "çļĦåħ³éĶ®": 4914, + "ä¼½": 4915, + "elebr": 4916, + "å¡ij": 4917, + "We": 4918, + "ç»ı常": 4919, + "Ġpopulations": 4920, + "åħ¬å¼ı": 4921, + "orn": 4922, + "çĩĥ": 4923, + "人çĶŁ": 4924, + "17": 4925, + "æİ¥åıĹ": 4926, + "Ġlocation": 4927, + "Ġinequ": 4928, + "Ġintervent": 4929, + "Ġinterested": 4930, + "Ġdefinitely": 4931, + "Ġassistance": 4932, + "è¿Ļä¸Ģ": 4933, + "åIJĪåIJĮ": 4934, + "ä¼ĺåĬ¿": 4935, + "çļĦå·¥ä½ľ": 4936, + "Ġ12": 4937, + "Ġmov": 4938, + "åģı": 4939, + "åŃĺåĤ¨": 4940, + "usive": 4941, + "æĹı": 4942, + "ï¼īï¼Į": 4943, + "Ġgas": 4944, + "Ġinterests": 4945, + "æ¸ħæĻ°": 4946, + "Ġgard": 4947, + "çĸ«": 4948, + "Ġsay": 4949, + "夫": 4950, + "ges": 4951, + "èIJ¨": 4952, + "ä¸ļåĬ¡": 4953, + "个æĢ§": 4954, + 
"åIJ¯": 4955, + "Ġengagement": 4956, + "Ġbig": 4957, + "éľĢè¦ģèĢĥèĻij": 4958, + "Ġprinci": 4959, + "åij¨åĽ´": 4960, + "Ġopportunity": 4961, + "çģ¾": 4962, + "èĹı": 4963, + "rel": 4964, + "缺çĤ¹": 4965, + "Ġhappy": 4966, + "åĴĮåħ¶ä»ĸ": 4967, + "ava": 4968, + "Ġestablish": 4969, + "鸡èĽĭ": 4970, + "iking": 4971, + "ĠTrans": 4972, + "rastructure": 4973, + "forest": 4974, + "èİ·åıĸ": 4975, + "èĦļ": 4976, + "inally": 4977, + "èµı": 4978, + "Ġdelicious": 4979, + "Ġresults": 4980, + "è§Ĥå¯Ł": 4981, + "å®ŀè·µ": 4982, + "Ġlast": 4983, + "Ġpolit": 4984, + "æĢ§èĥ½": 4985, + "For": 4986, + "bi": 4987, + "çĽ¸ä¿¡": 4988, + "ffee": 4989, + "Ġphr": 4990, + "Ġforest": 4991, + "elling": 4992, + "æµģè¡Į": 4993, + "atic": 4994, + "大家": 4995, + "ĠInst": 4996, + "æķ°åѦ": 4997, + "æī©": 4998, + "å®Įåħ¨": 4999, + "å¼ķèµ·": 5000, + "ese": 5001, + "转æį¢": 5002, + "Ġaffected": 5003, + "Ġrobotics": 5004, + "综ä¸Ĭ": 5005, + "Ġprop": 5006, + "让人": 5007, + "æ²³": 5008, + "ä¸ŃæľĢ": 5009, + "Ġautonomous": 5010, + "Ġhaving": 5011, + "Ġtrip": 5012, + "ury": 5013, + "Ġbiased": 5014, + "Ġconsiderations": 5015, + "Ġparticular": 5016, + "åįł": 5017, + "æİ¨å¹¿": 5018, + "Ġinitiatives": 5019, + "ials": 5020, + "åij³éģĵ": 5021, + "Ġtreatments": 5022, + "Ġemphas": 5023, + "çĭ¬çī¹çļĦ": 5024, + "Ġlay": 5025, + "æĶ¿çŃĸ": 5026, + "æĢİä¹Ī": 5027, + "ronic": 5028, + "play": 5029, + "Ġcook": 5030, + "è¿Ľåħ¥": 5031, + "è½®": 5032, + "Ġvolunte": 5033, + "Ġrain": 5034, + "ĠMon": 5035, + "Ġconsumption": 5036, + "èĽĭçϽ": 5037, + "ĠSoc": 5038, + "壤": 5039, + "Ġroutine": 5040, + "Ġimproved": 5041, + "To": 5042, + "人çī©": 5043, + "读èĢħ": 5044, + "Ġgoal": 5045, + "广åijĬ": 5046, + "éķ¿æľŁ": 5047, + "Ġey": 5048, + "He": 5049, + "Ġoutdo": 5050, + "Ġcuis": 5051, + "Ġaway": 5052, + "Ġbooks": 5053, + "Ġtopic": 5054, + "大åĪ©": 5055, + "house": 5056, + "Ġones": 5057, + "ç§Ł": 5058, + "':": 5059, + "æĪ¿å±ĭ": 5060, + "ç§»åĬ¨": 5061, + "Ġdisasters": 5062, + "ests": 5063, + "illing": 5064, + "绿èī²": 5065, + "åĵ²åѦ": 5066, + "æĪIJåĪĨ": 
5067, + "Ġoccur": 5068, + "ľä¼½": 5069, + "åľŁå£¤": 5070, + "çļĦ主è¦ģ": 5071, + "çݰå®ŀ": 5072, + "Ġanimal": 5073, + "é¢Ĩ导": 5074, + "Ġviews": 5075, + "éĤ®": 5076, + "æ°§åĮĸ": 5077, + "athy": 5078, + "éģĵå¾·": 5079, + "社交åªĴä½ĵ": 5080, + "ĠPersonal": 5081, + "ĽåĽ´": 5082, + "Ġpurch": 5083, + "Ġcountry": 5084, + "Ġremind": 5085, + "寸": 5086, + "Ġrights": 5087, + "çļĦçݯå¢ĥ": 5088, + "ĠPr": 5089, + "Ġline": 5090, + "ibr": 5091, + "驾": 5092, + "Ġmaj": 5093, + "Ġovercome": 5094, + "Ġnext": 5095, + "æīĢè¿°": 5096, + "è§Ħå®ļ": 5097, + "Ġinteractions": 5098, + "Ġconflic": 5099, + "Ġwhy": 5100, + "ç³»åĪĹ": 5101, + "å°¼": 5102, + "ibly": 5103, + "çīĽå¥¶": 5104, + "Ġresponses": 5105, + "ses": 5106, + "åѦä¼ļ": 5107, + "bol": 5108, + "Ġstandards": 5109, + "ulner": 5110, + "对è¯ĿåĨħ容": 5111, + "lished": 5112, + "çļĦæĢ§": 5113, + "çĶŁæĢģç³»ç»Ł": 5114, + "ann": 5115, + "æĥħåĨµä¸ĭ": 5116, + "寻æ±Ĥ": 5117, + "Ġhold": 5118, + "den": 5119, + "åįĥ": 5120, + "Ġmention": 5121, + "ĠMany": 5122, + "缴åΰ": 5123, + "éģĹ": 5124, + "hel": 5125, + "Ġbelieve": 5126, + "aries": 5127, + "æľīä¸Ģ个": 5128, + "13": 5129, + "Ġatmosphere": 5130, + "Ġmor": 5131, + "æĹ¥æľŁ": 5132, + "ä¹ħ": 5133, + "ä½łå¥½": 5134, + "Ġaddressing": 5135, + "ĠâĢĵ": 5136, + "çļĦåľ°æĸ¹": 5137, + "ming": 5138, + "Ġcannot": 5139, + "Ġmanufact": 5140, + "Ġpie": 5141, + "icing": 5142, + "Ġstudies": 5143, + "ç¾İåij³": 5144, + "ĠAmerican": 5145, + "ĠNLP": 5146, + "Ġaccording": 5147, + "mselves": 5148, + "èĦĤ": 5149, + "èĩªä¿¡": 5150, + "æīĢéľĢ": 5151, + "Ġthemselves": 5152, + "Ġremote": 5153, + "åŁ¹åħ»": 5154, + "å®īæİĴ": 5155, + "ä½łéľĢè¦ģ": 5156, + "Ġregard": 5157, + "iring": 5158, + "è¯ĨåĪ«": 5159, + "Ġarticle": 5160, + "æģĴ": 5161, + "æĢ»çļĦæĿ¥": 5162, + "Ġalign": 5163, + "æ±ł": 5164, + "tenance": 5165, + "faction": 5166, + "åĬ¨ä½ľ": 5167, + "çļĦç©": 5168, + "缩": 5169, + "æĢ¥": 5170, + "Ġ100": 5171, + "Ġtesting": 5172, + "åŃĹæ¯į": 5173, + "å¹´è½»": 5174, + "åζéĢł": 5175, + "Ġswe": 5176, + "å°º": 5177, + "hens": 5178, + "æ°´æŀľ": 
5179, + "Ġinfrastructure": 5180, + "èī²å½©": 5181, + "æĢ»çļĦæĿ¥è¯´": 5182, + "æľīä»Ģä¹Ī": 5183, + "text": 5184, + "车è¾Ĩ": 5185, + "Ġpay": 5186, + "rop": 5187, + "ĊĠĠ": 5188, + "Ġcaused": 5189, + "Ġcorrect": 5190, + "Ġì": 5191, + "èĥŀ": 5192, + "ĠMed": 5193, + "ç²¾ç¥ŀ": 5194, + "æ°ĶåĢĻåıĺåĮĸ": 5195, + "ĠRed": 5196, + "äºĴèģĶç½ij": 5197, + "Ġengage": 5198, + "åĪĨ为": 5199, + "ĠData": 5200, + "Ġfull": 5201, + "enc": 5202, + "éĩįæĸ°": 5203, + "æŃ£ç¡®çļĦ": 5204, + "çļĦæ°Ķ": 5205, + "åıĮæĸ¹": 5206, + "Ġcomes": 5207, + "åı¤ä»£": 5208, + "æŁIJäºĽ": 5209, + "åijĪçݰ": 5210, + "Ġtoday": 5211, + "aged": 5212, + "æĪijåı¯ä»¥": 5213, + "æĹ¥å¸¸": 5214, + "æ»ij": 5215, + "Ġclin": 5216, + "Ġ\\": 5217, + "Ġobs": 5218, + "Ġartificial": 5219, + "Ġexcell": 5220, + "çļĦç¬": 5221, + "alls": 5222, + "Ġproduce": 5223, + "ĠDes": 5224, + "oss": 5225, + "è¹Ī": 5226, + "Ġdraw": 5227, + "Ġletter": 5228, + "Ġadvice": 5229, + "Ġhighly": 5230, + "çĬ¯": 5231, + "综ä¸ĬæīĢè¿°": 5232, + "满æĦı": 5233, + "Ġprinciples": 5234, + "èĮĦ": 5235, + "Ġfeelings": 5236, + "çļĦæ´": 5237, + "Ġhom": 5238, + "Ġfail": 5239, + "Ġcrop": 5240, + "å§ľ": 5241, + "Ġquestion": 5242, + "Ġdisabilities": 5243, + "èĪŀè¹Ī": 5244, + "Ġimplications": 5245, + "ral": 5246, + "Ġsing": 5247, + "40": 5248, + "Ġfamil": 5249, + "Ġgovernments": 5250, + "Ġrecord": 5251, + "å½¢çĬ¶": 5252, + "Ġbegin": 5253, + "ises": 5254, + "çļĦæĥ³": 5255, + "achine": 5256, + "è°±": 5257, + "Ġvulner": 5258, + "Ġproper": 5259, + "Ġoversight": 5260, + "è´ŁéĿ¢": 5261, + "Ġemail": 5262, + "Ġnews": 5263, + "Ġexploring": 5264, + "Ġfavor": 5265, + "楼": 5266, + "å®ľ": 5267, + "Ġunivers": 5268, + "å·®å¼Ĥ": 5269, + "ï¼īãĢĤ": 5270, + "è§£åĨ³éĹ®é¢ĺ": 5271, + "Ġfamous": 5272, + "gn": 5273, + "Ġmessage": 5274, + "atitude": 5275, + "Ġcra": 5276, + "Ġcover": 5277, + "æ·±åĪ»": 5278, + "åı¯ä»¥éĢīæĭ©": 5279, + "çĶŁæ´»ä¸Ń": 5280, + "ç§įç±»": 5281, + "Ġsmart": 5282, + "onstr": 5283, + "vey": 5284, + "çͲ": 5285, + "Ġregularly": 5286, + "ĠSm": 5287, + "æĦŁè§ī": 5288, + "Ġthought": 
5289, + "Ġexh": 5290, + "cure": 5291, + "ç»ĺ": 5292, + "认è¯Ĩ": 5293, + "Ġold": 5294, + "æĦī": 5295, + "称为": 5296, + "Ġfields": 5297, + "Ġconsist": 5298, + "ãģ": 5299, + "ç»Ĩèĥŀ": 5300, + "Ġhours": 5301, + "80": 5302, + "alking": 5303, + "è§īå¾Ĺ": 5304, + "ç»Ŀ": 5305, + "ä½łä»¬": 5306, + "ĠEnglish": 5307, + "Ġsignificantly": 5308, + "Ġsource": 5309, + "Ġant": 5310, + "Ġeducational": 5311, + "Ġtask": 5312, + "Ġhandle": 5313, + "æIJľ": 5314, + "ĠSp": 5315, + "Ġcalled": 5316, + "Ġterms": 5317, + "æ²ī": 5318, + "Ġwin": 5319, + "duction": 5320, + "Ġmodern": 5321, + "Ġcuisine": 5322, + "å¥Ĺ": 5323, + "触": 5324, + "olutely": 5325, + "ç«¥": 5326, + "pite": 5327, + "Ġfelt": 5328, + "Ġcompre": 5329, + "Ġwond": 5330, + "è¿IJè¡Į": 5331, + "Ġresil": 5332, + "çĽ¸ä¼¼": 5333, + "éĩijèŀį": 5334, + "çαæĥħ": 5335, + "ç¬Ķ": 5336, + "èĪª": 5337, + "è°Ī": 5338, + "åĬĽçļĦ": 5339, + "æľīæīĢ": 5340, + "æ½ľ": 5341, + "ulate": 5342, + "Ġdetection": 5343, + "å®£ä¼ł": 5344, + "Ġmatter": 5345, + "éĩıåŃIJ": 5346, + "Write": 5347, + "ç»ĵåIJĪ": 5348, + "ç»ıè¿ĩ": 5349, + "Ġdevelopers": 5350, + "èª": 5351, + "Ġ---": 5352, + "人éĻħ": 5353, + "çѾ": 5354, + "ï¼ļâĢľ": 5355, + "Ġinnovative": 5356, + "ãĢĤâĢĿ": 5357, + "å½¼": 5358, + "饼": 5359, + "è¿ĩ度": 5360, + "Ġplanet": 5361, + "åħ°": 5362, + "å¸ģ": 5363, + "æķ¬": 5364, + "Ġlegal": 5365, + "Ġlot": 5366, + "æĪIJ为äºĨ": 5367, + "iate": 5368, + "Ġmis": 5369, + "åģĩ设": 5370, + "çļĦæĸĩ竳": 5371, + "ĠCompan": 5372, + "Ġdoc": 5373, + "Ġcareful": 5374, + "Ġever": 5375, + "æĪij们å°Ĩ": 5376, + "ä¾ĭåŃIJ": 5377, + "ä¹³": 5378, + "ä½ľèĢħ": 5379, + "åIJ§": 5380, + "æļ´": 5381, + "Ġremember": 5382, + "缮çļĦ": 5383, + "Ġput": 5384, + "常è§ģçļĦ": 5385, + "Ġfest": 5386, + "建设": 5387, + "å®ŀç͍": 5388, + "Ġactive": 5389, + "çªĹ": 5390, + "outh": 5391, + "åİŁçIJĨ": 5392, + "Ġtrying": 5393, + "è¿·": 5394, + "缸åIJĮ": 5395, + "éħĴåºĹ": 5396, + "Another": 5397, + "æľĢä½³": 5398, + "Ġanalytics": 5399, + "Ġperpet": 5400, + "ipment": 5401, + "Ġå¦Ĥæŀľ": 5402, + "è§Ĥä¼Ĺ": 5403, + 
"Ġcelebr": 5404, + "Ġheav": 5405, + "Ġmeditation": 5406, + "大æ°Ķ": 5407, + "And": 5408, + "ä¸įéĶĻ": 5409, + "Ġwhether": 5410, + "set": 5411, + "Ġdemonstr": 5412, + "ä¸Ģ款": 5413, + "æĶ¶éĽĨ": 5414, + "éĻIJåζ": 5415, + "Ġing": 5416, + "Ġrevolution": 5417, + "çľģ": 5418, + "Ġscience": 5419, + "缮åīį": 5420, + "Ġthinking": 5421, + "±ä¹IJ": 5422, + "课ç¨ĭ": 5423, + "Ġpack": 5424, + "Ġimage": 5425, + "loc": 5426, + "Ġstories": 5427, + "uck": 5428, + "Ġsatisfaction": 5429, + "Ġcollection": 5430, + "ho": 5431, + "èµŀ": 5432, + "éĿ¢ä¸´": 5433, + "Ġla": 5434, + "Ġsymbol": 5435, + "Ġemb": 5436, + "Ġhabitats": 5437, + "Ġlower": 5438, + "Ġcontinues": 5439, + "éľĩ": 5440, + "åĵĪ": 5441, + "ĠTake": 5442, + "Ġenvironments": 5443, + "Ġthree": 5444, + "Ġenc": 5445, + "ĠAcc": 5446, + "æĦıåij³": 5447, + "åݨ": 5448, + "chan": 5449, + "ĠHum": 5450, + "Ġtrue": 5451, + "åĪĩæĪIJ": 5452, + "sing": 5453, + "âĢĶâĢĶ": 5454, + "åĩºæĿ¥": 5455, + "Ġregion": 5456, + "Ġinterpre": 5457, + "Ġdiagnosis": 5458, + "éŀ": 5459, + "Ġdoing": 5460, + "Ġrun": 5461, + "Ġcoffee": 5462, + "Ġmajor": 5463, + "Ġmindfulness": 5464, + "Ġaffordable": 5465, + "çϾ": 5466, + "Ġdetailed": 5467, + "éĿŀ常éĩįè¦ģçļĦ": 5468, + "çļĦæ²ŁéĢļ": 5469, + "çļĦæķħ": 5470, + "åĢĴåħ¥": 5471, + "Ġthemes": 5472, + "Ġnetwork": 5473, + "ï¼īï¼ļ": 5474, + "ĠUnited": 5475, + "çļĦæĮĩ": 5476, + "orts": 5477, + "åį«çĶŁ": 5478, + "Ġplanning": 5479, + "æĥł": 5480, + "åīª": 5481, + "ĠProv": 5482, + "çļĦåºĶç͍": 5483, + "Ġperi": 5484, + "Ġaccountable": 5485, + "çīĻ": 5486, + "çļĦçģ": 5487, + "Ġchoice": 5488, + "ĠComm": 5489, + "idents": 5490, + "çļĦå®īåħ¨": 5491, + "å¹¶ä¸į": 5492, + "太éĺ³ç³»": 5493, + "Ġreceive": 5494, + "Ġclose": 5495, + "çļĦæĹ¶åĢĻ": 5496, + "Ġchanging": 5497, + "ä»·å̼è§Ĥ": 5498, + "Ġperpetu": 5499, + "Ġseason": 5500, + "Ġmen": 5501, + "Ġlearned": 5502, + "Ġsituation": 5503, + "Ġreplace": 5504, + "head": 5505, + "让æĪij": 5506, + "åľ¨ä¸Ģèµ·": 5507, + "çļĦ空": 5508, + "éľ²": 5509, + "Ġenough": 5510, + "å±ķçݰ": 5511, + "Ġleaders": 5512, + 
"ancing": 5513, + "Ġtemperature": 5514, + "åı«": 5515, + "Ġ30": 5516, + "æĦıåij³çĿĢ": 5517, + "æ±ĩ": 5518, + "ĠGovern": 5519, + "Ġfocused": 5520, + "uro": 5521, + "Ġsimple": 5522, + "Ġhiking": 5523, + "æ¯Ĵ": 5524, + "Ġcomprehens": 5525, + "äºĪ": 5526, + "Ġcreated": 5527, + "cond": 5528, + "页": 5529, + "ĠWor": 5530, + "è¯ģæį®": 5531, + "Ġworkplace": 5532, + "Ġcharacters": 5533, + "çļĦ设计": 5534, + "Ġmechan": 5535, + "ĠDis": 5536, + "ç¥ŀç§ĺ": 5537, + "å·ŀ": 5538, + "ĠOn": 5539, + "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "1": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "2": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + } + }, + "additional_special_tokens": [], + "bos_token": "", + "clean_up_tokenization_spaces": false, + "eos_token": "", + "legacy": true, + "model_max_length": 32768, + "pad_token": "", + "sp_model_kwargs": {}, + "spaces_between_special_tokens": false, + "tokenizer_class": "PreTrainedTokenizerFast", + "unk_token": "", + "chat_template": "{% if messages[0]['role'] == 'system' %}{% set system_message = messages[0]['content'] %}{{ 'system\\n' + system_message + '\\n' }}{% else %}{{ 'system\\n你是 MiniMind,是一个有用的人工智能助手。\\n' }}{% endif %}{% for message in messages %}{% set content = message['content'] %}{% if message['role'] == 'user' %}{{ 'user\\n' + content + '\\nassistant\\n' }}{% elif message['role'] == 'assistant' %}{{ content + '' + '\\n' }}{% endif %}{% endfor %}" +} \ No newline at end of file diff --git a/model/minimind_tokenizer/vocab.json b/model/minimind_tokenizer/vocab.json new file mode 100644 index 0000000..e1318cc --- /dev/null +++ b/model/minimind_tokenizer/vocab.json @@ -0,0 +1 @@ 
+{"":0,"":1,"":2,"!":3,"\"":4,"#":5,"$":6,"%":7,"&":8,"'":9,"(":10,")":11,"*":12,"+":13,",":14,"-":15,".":16,"/":17,"0":18,"1":19,"2":20,"3":21,"4":22,"5":23,"6":24,"7":25,"8":26,"9":27,":":28,";":29,"<":30,"=":31,">":32,"?":33,"@":34,"A":35,"B":36,"C":37,"D":38,"E":39,"F":40,"G":41,"H":42,"I":43,"J":44,"K":45,"L":46,"M":47,"N":48,"O":49,"P":50,"Q":51,"R":52,"S":53,"T":54,"U":55,"V":56,"W":57,"X":58,"Y":59,"Z":60,"[":61,"\\":62,"]":63,"^":64,"_":65,"`":66,"a":67,"b":68,"c":69,"d":70,"e":71,"f":72,"g":73,"h":74,"i":75,"j":76,"k":77,"l":78,"m":79,"n":80,"o":81,"p":82,"q":83,"r":84,"s":85,"t":86,"u":87,"v":88,"w":89,"x":90,"y":91,"z":92,"{":93,"|":94,"}":95,"~":96,"¡":97,"¢":98,"£":99,"¤":100,"¥":101,"¦":102,"§":103,"¨":104,"©":105,"ª":106,"«":107,"¬":108,"®":109,"¯":110,"°":111,"±":112,"²":113,"³":114,"´":115,"µ":116,"¶":117,"·":118,"¸":119,"¹":120,"º":121,"»":122,"¼":123,"½":124,"¾":125,"¿":126,"À":127,"Á":128,"Â":129,"Ã":130,"Ä":131,"Å":132,"Æ":133,"Ç":134,"È":135,"É":136,"Ê":137,"Ë":138,"Ì":139,"Í":140,"Î":141,"Ï":142,"Ð":143,"Ñ":144,"Ò":145,"Ó":146,"Ô":147,"Õ":148,"Ö":149,"×":150,"Ø":151,"Ù":152,"Ú":153,"Û":154,"Ü":155,"Ý":156,"Þ":157,"ß":158,"à":159,"á":160,"â":161,"ã":162,"ä":163,"å":164,"æ":165,"ç":166,"è":167,"é":168,"ê":169,"ë":170,"ì":171,"í":172,"î":173,"ï":174,"ð":175,"ñ":176,"ò":177,"ó":178,"ô":179,"õ":180,"ö":181,"÷":182,"ø":183,"ù":184,"ú":185,"û":186,"ü":187,"ý":188,"þ":189,"ÿ":190,"Ā":191,"ā":192,"Ă":193,"ă":194,"Ą":195,"ą":196,"Ć":197,"ć":198,"Ĉ":199,"ĉ":200,"Ċ":201,"ċ":202,"Č":203,"č":204,"Ď":205,"ď":206,"Đ":207,"đ":208,"Ē":209,"ē":210,"Ĕ":211,"ĕ":212,"Ė":213,"ė":214,"Ę":215,"ę":216,"Ě":217,"ě":218,"Ĝ":219,"ĝ":220,"Ğ":221,"ğ":222,"Ġ":223,"ġ":224,"Ģ":225,"ģ":226,"Ĥ":227,"ĥ":228,"Ħ":229,"ħ":230,"Ĩ":231,"ĩ":232,"Ī":233,"ī":234,"Ĭ":235,"ĭ":236,"Į":237,"į":238,"İ":239,"ı":240,"IJ":241,"ij":242,"Ĵ":243,"ĵ":244,"Ķ":245,"ķ":246,"ĸ":247,"Ĺ":248,"ĺ":249,"Ļ":250,"ļ":251,"Ľ":252,"ľ":253,"Ŀ":254,"ŀ":255,"Ł":256,"ł":257,"Ń":258,"Ġt":259,"Ġa":260,"in":261,"he":262
,"re":263,"ï¼":264,"ä¸":265,"on":266,"at":267,"çļ":268,"çļĦ":269,"ï¼Į":270,"Ġs":271,"Ġc":272,"nd":273,"ãĢ":274,"er":275,"Ġthe":276,"es":277,"en":278,"or":279,"an":280,"Ġand":281,"ing":282,"Ġp":283,"it":284,"al":285,"ãĢĤ":286,"Ġo":287,"Ġw":288,"ä»":289,"Ġto":290,"is":291,"ou":292,"Ġm":293,"äº":294,"Ġin":295,"Ġf":296,"Ġb":297,"ed":298,"ion":299,"åı":300,"ic":301,"Ġd":302,"Ġof":303,"le":304,"ar":305,"ro":306,"ĠĠ":307,"åħ":308,"ent":309,"æľ":310,"Ġe":311,"åĴ":312,"è¿":313,"ä½":314,"åĴĮ":315,"æĪ":316,"å®":317,"åĪ":318,"ve":319,"us":320,"Ġre":321,"Ġh":322,"Ġth":323,"as":324,"ct":325,"çĶ":326,"om":327,"åľ":328,"å¤":329,"æĺ":330,"åĬ":331,"åIJ":332,"ä¸Ģ":333,"im":334,"è¯":335,"æĸ":336,"ation":337,"lo":338,"ç»":339,"Ġbe":340,"ãĢģ":341,"id":342,"Ġcan":343,"il":344,"æĺ¯":345,"ä¹":346,"è®":347,"ĠA":348,"Ġthat":349,"ĠT":350,"以":351,"ch":352,"Ġy":353,"ce":354,"ï¼ļ":355,"ot":356,"ers":357,"Ġn":358,"éĢ":359,"ra":360,"å°":361,"Ġg":362,"Ġyou":363,"åŃ":364,"Ġpro":365,"et":366,"åº":367,"åľ¨":368,"ly":369,"Ġis":370,"个":371,"Ġl":372,"ur":373,"Ġfor":374,"åı¯":375,"éĩ":376,"st":377,"çļĦæ":378,"ut":379,"Ġhe":380,"if":381,"ĥ½":382,"ä¼":383,"ĠI":384,"è¡":385,"ir":386,"ith":387,"å¹":388,"Ġare":389,"ig":390,"Ġst":391,"el":392,"ol":393,"å¸":394,"ul":395,"æĿ":396,"æĪij":397,"Ġon":398,"è¦":399,"æľī":400,"æĹ":401,"å¯":402,"è§":403,"è¦ģ":404,"Ġus":405,"ay":406,"æķ":407,"çī":408,"ow":409,"ment":410,"ç͍":411,"ess":412,"ä¸Ń":413,"们":414,"人":415,"åĩ":416,"Ġex":417,"ĠĠĠĠ":418,"åĽ":419,"åĮ":420,"å¼":421,"Ġcon":422,"se":423,"èĥ½":424,"çİ":425,"Ġan":426,"Ġwith":427,"为":428,"ate":429,"iv":430,"am":431,"Ġas":432,"ure":433,"è¿Ļ":434,"åĨ":435,"çŃ":436,"Ġor":437,"å·":438,"Ġal":439,"ies":440,"ç§":441,"Ġim":442,"æĢ":443,"ver":444,"ab":445,"äºĨ":446,"Ġsu":447,"Ġde":448,"ge":449,"th":450,"åı¯ä»¥":451,"èĢ":452,"ä¸į":453,"å¾":454,"ĠAI":455,"Ġen":456,"éĹ":457,"æī":458,"ak":459,"ive":460,"Ġmo":461,"å¥":462,"éĿ":463,"çĽ":464,"ity":465,"ä¿":466,"un":467,"è´":468,"åį":469,"Ġit":470,"Ġimp":471,"ect":472,"æł":473,"å½":474,"èĩ
":475,"é¢":476,"åĵ":477,"æ³":478,"ort":479,"ad":480,"æŀ":481,"em":482,"Ġcom":483,"å¦":484,"her":485,"ere":486,"ĠS":487,"ial":488,"ĠC":489,"ĠThe":490,"çIJ":491,"çĶŁ":492,"æĦ":493,"pp":494,"æŃ":495,"æĸ¹":496,"qu":497,"Ġwh":498,"å¦Ĥ":499,"éľ":500,"ant":501,"Ġle":502,"Ġv":503,"æĭ":504,"æĬ":505,"ust":506,"æĹ¶":507,"çŃī":508,"åij":509,"对":510,"ter":511,"ld":512,"è¡Į":513,"Ġch":514,"ud":515,"éľĢ":516,"æ°":517,"æĪIJ":518,"Ġ|":519,"ac":520,"ain":521,"iz":522,"æı":523,"ions":524,"Ġha":525,"æĽ":526,"--":527,"æĿ¥":528,"ome":529,"å¿":530,"'s":531,"Ġne":532,"est":533,"ä¾":534,"um":535,"åΰ":536,"åľ°":537,"ist":538,"âĢ":539,"çī©":540,"ä¸Ģ个":541,"lp":542,"æİ":543,"èĩª":544,"Ġhelp":545,"Ġtheir":546,"æĶ":547,"ä½ľ":548,"ä¼ļ":549,"æĮ":550,"æĪij们":551,"nt":552,"äºİ":553,"åĪĨ":554,"res":555,"pe":556,"åĩº":557,"ide":558,"æĥ":559,"ĠH":560,"è¾":561,"ĠM":562,"ff":563,"æ¯":564,"od":565,"ical":566,"Ġwor":567,"ä¸Ĭ":568,"are":569,"æĽ´":570,"Ġyour":571,"ä¸ĭ":572,"èµ":573,"ations":574,"æķ°":575,"Ġte":576,"åİ":577,"çIJĨ":578,"ĠTh":579,"è¿ĩ":580,"å¹¶":581,"du":582,"éĿ¢":583,"Ġad":584,"ill":585,"æµ":586,"好":587,"oc":588,"act":589,"éľĢè¦ģ":590,"ä»ĸ":591,"å±":592,"Ġr":593,"Ġmore":594,"åѦ":595,"ç®":596,"igh":597,"äºĽ":598,"ĠB":599,"åĬ¨":600,"åĵģ":601,"èī":602,"ple":603,"Ġinc":604,"åIJĮ":605,"Ġexp":606,"ould":607,"ä½ł":608,"æį":609,"æıIJ":610,"大":611,"çݰ":612,"pt":613,"ĠP":614,"all":615,"åĬł":616,"ç§į":617,"Ġse":618,"åĬĽ":619,"out":620,"Ġhave":621,"çº":622,"ä½ĵ":623,"Ġprov":624,"åĮĸ":625,"å¤ļ":626,"å®ļ":627,"Ġused":628,"éĢļ":629,"cc":630,"è¿Ľ":631,"æ´":632,"Ġsh":633,"Ġab":634,"os":635,"Ġres":636,"ĠThis":637,"ç¨":638,"æĢ§":639,"age":640,"ri":641,"æ¸":642,"able":643,"åŃIJ":644,"Ġby":645,"åıij":646,"éĩı":647,"åºĶ":648,"Ġlo":649,"使":650,"åħ¶":651,"é«":652,"éĻ":653,"é«ĺ":654,"度":655,"è§£":656,"é£":657,"å°Ĩ":658,"æ³ķ":659,"and":660,"ä¿Ŀ":661,"ans":662,"for":663,"rom":664,"reat":665,"Ġpl":666,"çļĦç":667,"常":668,"è½":669,"Ġwe":670,"表":671,"ake":672,"æĪĸ":673,"é¢ĺ":674,"åŁ":675,"Ġme":676,"æĸĩ":677,"ther":678,"ke":
679,"å®¶":680,"åIJĪ":681,"æľĢ":682,"ine":683,"Ġsome":684,"ç±":685,"éĩį":686,"æŀľ":687,"ĠW":688,"ĠE":689,"éĺ":690,"our":691,"rou":692,"çĤ":693,"æ±":694,"åħ³":695,"Ġint":696,"ance":697,"ä¹Ł":698,"éģ":699,"ĠĠĠ":700,"å®ĥ":701,"ag":702,"æ¬":703,"00":704,"è°":705,"ult":706,"yst":707,"éĹ´":708,"ç³":709,"Ġtr":710,"pl":711,"art":712,"æĦŁ":713,"æĤ":714,"ata":715,"ĠF":716,"form":717,"计":718,"Ġfrom":719,"ĠD":720,"éĹ®":721,"ight":722,"ces":723,"æį®":724,"lop":725,"ä¹ĭ":726,"Ġfe":727,"åģ":728,"velop":729,"Ġ1":730,"åĽł":731,"ks":732,"æ²":733,"Ġu":734,"å°ı":735,"ystem":736,"Ġdis":737,"ĠR":738,"gy":739,"å·¥":740,"ç¨ĭ":741,"å¢":742,"ence":743,"èĤ":744,"ç¡":745,"Ġtra":746,"å»":747,"åħ¥":748,"ign":749,"alth":750,"Ġsuch":751,"ach":752,"æĻ":753,"arn":754,"Ġdata":755,"è¶":756,"å®ŀ":757,"so":758,"Ġdevelop":759,"ç¤":760,"Ġacc":761,"ast":762,"èĢĮ":763,"Ġ\"":764,"Ġother":765,"建":766,"Ġeff":767,"ç«":768,"Ġman":769,"åħ¬":770,"åĢ":771,"çĦ":772,"ms":773,"å¼ı":774,"èī²":775,"å¾Ĺ":776,"ific":777,"Ġj":778,"Ġro":779,"Ġhas":780,"chn":781,"olo":782,"åζ":783,"èĬ":784,"使ç͍":785,"ous":786,"ual":787,"Ġat":788,"Ġem":789,"ell":790,"Ġsystem":791,"Ġhealth":792,"ities":793,"Ġexam":794,"ib":795,"éĶ":796,"Ġabout":797,"产":798,"åIJİ":799,"æĦı":800,"ç±»":801,"Ġpre":802,"æĤ¨":803,"Ġalso":804,"ents":805,"Ġind":806,"ind":807,"éĢĤ":808,"Ġtechn":809,"ress":810,"æĥħ":811,"éĹ®é¢ĺ":812,"Ġuse":813,"ï¼Ł":814,"Ġincl":815,"Ġspe":816,"ich":817,"ps":818,"æľº":819,"Ġthey":820,"ie":821,"Ġhow":822,"Ġwork":823,"ä¸ļ":824,"ç´":825,"Ġimpro":826,"Ġlearn":827,"æĸ°":828,"çĤ¹":829,"Ġcont":830,"ard":831,"çĦ¶":832,"æľ¬":833,"ç³»":834,"ç¡®":835,"设":836,"åħ·":837,"éĢī":838,"èĢħ":839,"éħ":840,"gh":841,"__":842,"Ġnot":843,"çľ":844,"缸":845,"Ġprovide":846,"åī":847,"ional":848,"Ġens":849,"ä¸İ":850,"è´¨":851,"ential":852,"ç»ı":853,"å¿ĥ":854,"ang":855,"æŃ¤":856,"end":857,"Ġpo":858,"è¿Ľè¡Į":859,"ice":860,"Ġ-":861,"Ġway":862,"å·±":863,"Ġ2":864,"ime":865,"ç½":866,"èĩªå·±":867,"Ġun":868,"bot":869,"Ġinclud":870,"ated":871,"æ°´":872,"éķ":873,"æĮģ":874,"代":8
75,"é¡":876,"æīĢ":877,"çĿ":878,"pport":879,"ood":880,"ike":881,"ru":882,"Ġcomm":883,"ĠL":884,"ä¿¡":885,"ĠG":886,"çŁ":887,"ç͵":888,"Ġwas":889,"low":890,"erv":891,"åĮħ":892,"ĠĠĠĠĠĠĠĠ":893,"Ġwhe":894,"dit":895,"Ġwhich":896,"Ġcomp":897,"éª":898,"ore":899,"ç¾":900,"Ġ=":901,"çī¹":902,"iff":903,"ert":904,"æģ":905,"rit":906,"Ġrec":907,"åĨħ":908,"æĺİ":909,"ors":910,"Ġpat":911,"----":912,"æŁ":913,"Ġapp":914,"ns":915,"åĬ¡":916,"aly":917,"ace":918,"æ´»":919,"ä¾Ľ":920,"av":921,"主":922,"Ġpers":923,"çĥ":924,"该":925,"Ġmy":926,"ç©":927,"eri":928,"让":929,"æĬĢ":930,"éķ¿":931,"ack":932,"ĠN":933,"Ġdiff":934,"Ġthis":935,"åĿ":936,"Ġensure":937,"å½ĵ":938,"Ġout":939,"Ġcl":940,"Ġk":941,"é¦":942,"ount":943,"çݯ":944,"åĬ©":945,"Ġtechnolo":946,"Ġthese":947,"ful":948,"éļ":949,"æ·":950,"ä¸ĢäºĽ":951,"Ġsoc":952,"å¼Ģ":953,"天":954,"Ġev":955,"Ġredu":956,"Ġthem":957,"Ġ(":958,"éĥ½":959,"æĪ·":960,"è·":961,"åľº":962,"æ°Ķ":963,"ĠY":964,"è¯Ń":965,"éĢļè¿ĩ":966,"å±ķ":967,"Ġco":968,"å½±":969,"ç¬":970,"Ġanaly":971,"æ¯Ķ":972,"åħ¨":973,"Ġimprove":974,"ç»ĵ":975,"å¹´":976,"çķ":977,"çĿĢ":978,"Ġhum":979,"Ġqu":980,"ç®Ĺ":981,"ĠO":982,"é£Ł":983,"ility":984,"Ġsystems":985,"åıĺ":986,"ail":987,"ç¼":988,"çł":989,"è¿Ļ个":990,"æıIJä¾Ľ":991,"ase":992,"åŀ":993,"ments":994,"Ġpot":995,"Ġany":996,"ä½Ĩ":997,"Ġcons":998,"ĠIt":999,"æł¼":1000,"Ġar":1001,"æľ¯":1002,"éĿŀ":1003,"Ġdo":1004,"Ġmay":1005,"æĭ©":1006,"ue":1007,"éĢīæĭ©":1008,"ry":1009,"éĥ":1010,"Ġlike":1011,"ong":1012,"èģ":1013,"``":1014,"ile":1015,"æ±Ĥ":1016,"Ġnew":1017,"ient":1018,"Ġimpact":1019,"è¿ĺ":1020,"注":1021,"ä¹Ī":1022,"缮":1023,"âĢľ":1024,"âĢĿ":1025,"ef":1026,"ä¾ĭ":1027,"Ġpotential":1028,"ok":1029,"åı¯èĥ½":1030,"Ġtrans":1031,"Ġact":1032,"ï¼ī":1033,"Ġspec":1034,"æ¶":1035,"Ġwill":1036,"交":1037,"ize":1038,"ç¾İ":1039,"å¸Ĥ":1040,"Ġstud":1041,"pon":1042,"èº":1043,"ä¸įåIJĮ":1044,"one":1045,"å¾Ī":1046,"åıĬ":1047,"å¦Ĥæŀľ":1048,"çIJĥ":1049,"ange":1050,"Ġneed":1051,"å¤ĸ":1052,"ety":1053,"aking":1054,"请":1055,"ater":1056,"Ġperson":1057,"ident":1058,"Ġso":1059,"Ġmake":1060,"å¹³":1061
,"å¤Ł":1062,"身":1063,"ï¼Ī":1064,"Ġinform":1065,"æ¡":1066,"äºĭ":1067,"åıĹ":1068,"ased":1069,"ild":1070,"Ġoff":1071,"Ġthere":1072,"cis":1073,"è¢":1074,"éĥ¨":1075,"æ¯ı":1076,"ract":1077,"ass":1078,"Ġlearning":1079,"åĸ":1080,"å½¢":1081,"ire":1082,"ä»İ":1083,"bots":1084,"èĻ":1085,"帮":1086,"Ġdes":1087,"ĠIn":1088,"cess":1089,"Ġpe":1090,"ify":1091,"Ġwho":1092,"ä¹ł":1093,"æľŁ":1094,"Ġexperi":1095,"éĤ":1096,"Ġsc":1097,"ep":1098,"ä½ķ":1099,"Ġtime":1100,"éĿŀ常":1101,"æĭ¬":1102,"åķ":1103,"以ä¸ĭ":1104,"éģĵ":1105,"Ġcommun":1106,"Ġcould":1107,"ap":1108,"èIJ":1109,"è°ĥ":1110,"lic":1111,"duct":1112,"Ġits":1113,"cy":1114,"说":1115,"Ġmed":1116,"Ġcol":1117,"ular":1118,"éĩįè¦ģ":1119,"Ġsp":1120,"åĪ©":1121,"èµ·":1122,"Ġprovid":1123,"ices":1124,"åĻ":1125,"æĸĻ":1126,"Ġimport":1127,"ural":1128,"åŃĹ":1129,"Ġund":1130,"int":1131,"Ġover":1132,"åı¸":1133,"æł¹":1134,"é¥":1135,"ples":1136,"ä»ĸ们":1137,"gra":1138,"uring":1139,"now":1140,"åįķ":1141,"è¿ĻäºĽ":1142,"åīį":1143,"å®ī":1144,"Ġpr":1145,"åĮħæĭ¬":1146,"ç»Ļ":1147,"The":1148,"ä½į":1149,"å§":1150,"ç´ł":1151,"åijĺ":1152,"Ġident":1153,"åŀĭ":1154,"Ġadd":1155,"强":1156,"æĺ¯ä¸Ģ":1157,"ip":1158,"gor":1159,"Ġsupport":1160,"ne":1161,"Ġdiffere":1162,"åħĥ":1163,"Ġass":1164,"åĨ³":1165,"éĽ":1166,"åIJį":1167,"Ġgo":1168,"Ġtechnology":1169,"æĢ»":1170,"è®®":1171,"Ġinter":1172,"Ġinv":1173,"Ġour":1174,"æķĪ":1175,"ustom":1176,"Ġrel":1177,"ife":1178,"åύ":1179,"ings":1180,"ä»·":1181,"Ġpart":1182,"被":1183,"æīĭ":1184,"ary":1185,"Ġrespon":1186,"ĊĠĠĠ":1187,"好çļĦ":1188,"ative":1189,"帮åĬ©":1190,"绣":1191,"æĶ¾":1192,"ĠHere":1193,"çģ":1194,"Ġbut":1195,"æģ¯":1196,"æŃ£":1197,"ark":1198,"åħ¬åı¸":1199,"ory":1200,"å¢ĥ":1201,"lect":1202,"éŁ":1203,"æĥ³":1204,"é£İ":1205,"ating":1206,"Ġam":1207,"its":1208,"æ»":1209,"gorith":1210,"åĵį":1211,"ures":1212,"Ġeffect":1213,"Ġshould":1214,"Ġper":1215,"è±":1216,"ç²":1217,"ict":1218,"Ġalgorith":1219,"uc":1220,"rough":1221,"ä»»":1222,"ä»¶":1223,"Ġbet":1224,"ia":1225,"Ġanalyz":1226,"æł¹æį®":1227,"ized":1228,"æµģ":1229,"è§Ĥ":1230,"è£":1231,"æłĩ":1232,"
iron":1233,"Ġcustom":1234,"Ġreg":1235,"Ġpersonal":1236,"èĥ½å¤Ł":1237,"ics":1238,"ivid":1239,"çĪ":1240,"èµĦ":1241,"æŃ¥":1242,"容":1243,"åĪĽ":1244,"èĪ":1245,"ä¹IJ":1246,"导":1247,"gan":1248,"èĬĤ":1249,"Ġall":1250,"ens":1251,"ame":1252,"ness":1253,"Ġup":1254,"ĠU":1255,"èĢĥ":1256,"elf":1257,"å̼":1258,"å°ij":1259,"æľį":1260,"ari":1261,"thical":1262,"viron":1263,"èĥ":1264,"ord":1265,"Ġsign":1266,"éĩĮ":1267,"ound":1268,"ople":1269,"åŁº":1270,"Ġinformation":1271,"Ġidentify":1272,"åĽŀ":1273,"Ġcre":1274,"éŁ³":1275,"ible":1276,"ub":1277,"è¿IJ":1278,"Ġlead":1279,"游":1280,"次":1281,"åĨĻ":1282,"éĤ£":1283,"get":1284,"èį":1285,"Ġexample":1286,"ä¼ĺ":1287,"å½±åĵį":1288,"ish":1289,"xt":1290,"æº":1291,"éªĮ":1292,"ob":1293,"客":1294,"å¤ĩ":1295,"åģ¥":1296,"车":1297,"社":1298,"ividual":1299,"ered":1300,"les":1301,"Ġenviron":1302,"Ġpeople":1303,"æĺŁ":1304,"çĸ":1305,"çĭ":1306,"Ġdet":1307,"æĹł":1308,"Ġif":1309,"ose":1310,"ite":1311,"å¢ŀ":1312,"éĴ":1313,"åIJĮæĹ¶":1314,"è¿°":1315,"æĸ¹å¼ı":1316,"åĽ½":1317,"é»":1318,"å¤Ħ":1319,"Ġexamples":1320,"æ®":1321,"Ġinto":1322,"æĮĩ":1323,"Ġhuman":1324,"åIJij":1325,"示":1326,"æķ°æį®":1327,"Ġ3":1328,"ĠJ":1329,"èı":1330,"çݯå¢ĥ":1331,"als":1332,"erst":1333,"Ġethical":1334,"ç»Ħ":1335,"ä¼ł":1336,"Ġdifferent":1337,"Ġknow":1338,"åºı":1339,"Ġindividual":1340,"æıIJé«ĺ":1341,"round":1342,"å°±":1343,"åıĸ":1344,"åŃĺ":1345,"两":1346,"çŁ¥":1347,"ources":1348,"ck":1349,"å£":1350,"ines":1351,"è¾¾":1352,"Ġmany":1353,"æķ´":1354,"æł·":1355,"ditional":1356,"omm":1357,"çͱ":1358,"éĢł":1359,"å®ĥ们":1360,"ues":1361,"Ġment":1362,"Ġimportant":1363,"Ġopt":1364,"Ġloc":1365,"ph":1366,"Ġprocess":1367,"Ġalgorithms":1368,"设计":1369,"Ġsocial":1370,"very":1371,"åĪĻ":1372,"ä¾ĭå¦Ĥ":1373,"认":1374,"Ġaut":1375,"Ġserv":1376,"gg":1377,"产åĵģ":1378,"è§Ħ":1379,"çľĭ":1380,"vel":1381,"æĸ¹æ³ķ":1382,"Ġben":1383,"åĽłæŃ¤":1384,"care":1385,"per":1386,"åĬŁ":1387,"建议":1388,"Ġpos":1389,"æ¤":1390,"we":1391,"åĮº":1392,"iqu":1393,"Ġreal":1394,"æĹ¥":1395,"Ġreduce":1396,"af":1397,"angu":1398,"Ġsk":1399,"Ġed":1400,"erstand":1
401,"åĨµ":1402,"mot":1403,"åħĪ":1404,"ç¥":1405,"åºĶ该":1406,"Ġthrough":1407,"Ġconc":1408,"åıijå±ķ":1409,"è¯ķ":1410,"æ¡Ī":1411,"Ġenvironment":1412,"åı£":1413,"Ġadv":1414,"åĪ«":1415,"Ġbenef":1416,"æ¸ħ":1417,"åij³":1418,"åħī":1419,"Ġdevelopment":1420,"eng":1421,"å¦Ĥä½ķ":1422,"管":1423,"ivers":1424,"åIJĦ":1425,"Ġris":1426,"row":1427,"ergy":1428,"计ç®Ĺ":1429,"ä¿¡æģ¯":1430,"Ġproduct":1431,"è¾ĥ":1432,"论":1433,"èĩªå·±çļĦ":1434,"æĬ¤":1435,"åıį":1436,"åħ¶ä»ĸ":1437,"åĪĹ":1438,"ç»Ĩ":1439,"空":1440,"Ġgreat":1441,"ear":1442,"æºIJ":1443,"ject":1444,"çĶŁæ´»":1445,"ä¸ŃçļĦ":1446,"Ġunderstand":1447,"èĭ":1448,"hat":1449,"Ġprogra":1450,"çĬ":1451,"éĩij":1452,"Ġincluding":1453,"Ġaccess":1454,"ĠĠĠĠĠĠĠ":1455,"è¯Ĩ":1456,"ç¦":1457,"og":1458,"è£ħ":1459,"Ġart":1460,"Ġwrit":1461,"Ġincre":1462,"Ġph":1463,"æĸ¹éĿ¢":1464,"Ġpract":1465,"Ġusing":1466,"项":1467,"æİ¥":1468,"Ġways":1469,"Ġlangu":1470,"æĶ¯":1471,"Ġchall":1472,"åİ»":1473,"____":1474,"imate":1475,"æĸŃ":1476,"è¨":1477,"Ġwell":1478,"ll":1479,"Ġpol":1480,"æĢģ":1481,"Ġra":1482,"Can":1483,"åİŁ":1484,"ber":1485,"è¨Ģ":1486,"ç«ĭ":1487,"Ġgen":1488,"éħį":1489,"æ·±":1490,"te":1491,"ä¸ī":1492,"ç§ij":1493,"ĠFor":1494,"线":1495,"çħ":1496,"æ¼":1497,"åķĨ":1498,"æĿIJ":1499,"Ġsignific":1500,"Ġgu":1501,"Ġdecis":1502,"Ġtrain":1503,"Ġag":1504,"Ġcreat":1505,"å®Į":1506,"æĹ¶éĹ´":1507,"Ġone":1508,"èĦ":1509,"Ġnat":1510,"åŃ¦ä¹ł":1511,"çļĦæķ":1512,"ced":1513,"Ġwhen":1514,"Ġbi":1515,"èİ":1516,"æĽ´åĬł":1517,"ives":1518,"port":1519,"å·¥ä½ľ":1520,"ving":1521,"Ġbeen":1522,"æĻº":1523,"Ġlife":1524,"å¼ķ":1525,"arm":1526,"çİĩ":1527,"ç͍æĪ·":1528,"ä¹ī":1529,"份":1530,"è¯Ŀ":1531,"iness":1532,"com":1533,"康":1534,"åĩı":1535,"ä»Ģ":1536,"è¾ĵ":1537,"Ġvari":1538,"con":1539,"Ġmod":1540,"ä»Ģä¹Ī":1541,"Ġenergy":1542,"æĬĢæľ¯":1543,"ertain":1544,"mm":1545,"verall":1546,"åĪĴ":1547,"Ġrobots":1548,"Ġorgan":1549,"æİ¨":1550,"ants":1551,"åĩĨ":1552,"ds":1553,"æŀģ":1554,"çĻ":1555,"Ġrequ":1556,"Ġess":1557,"ç®Ģ":1558,"ustain":1559,"æ¨":1560,"Ġstr":1561,"cing":1562,"ability":1563,"ree":1564,"Ġeduc":1565,"åİĨ
":1566,"Ġcreate":1567,"åģ¥åº·":1568,"Ġdesign":1569,"ips":1570,"åģļ":1571,"èĬ±":1572,"ink":1573,"èıľ":1574,"æī¾":1575,"段":1576,"æµĭ":1577,"ĠV":1578,"ĠBy":1579,"åĶ":1580,"é¦ĸ":1581,"è¯į":1582,"Ġwhere":1583,"Ġdisc":1584,"äºĨè§£":1585,"ric":1586,"ä¸Ķ":1587,"è¶³":1588,"æĺ¯ä¸Ģ个":1589,"arch":1590,"积":1591,"带":1592,"Ġwhile":1593,"Ġsignificant":1594,"çłģ":1595,"æĪ¿":1596,"Ġbeing":1597,"Ġlanguage":1598,"itive":1599,"20":1600,"Ġanalyze":1601,"æĻ¯":1602,"èĮ":1603,"rib":1604,"模":1605,"ĠSt":1606,"è´¹":1607,"'t":1608,"Ġhealthcare":1609,"Ġexperience":1610,"Ġ5":1611,"个人":1612,"ays":1613,"象":1614,"plo":1615,"Ġwould":1616,"èĻij":1617,"æĶ¶":1618,"é¢Ħ":1619,"é¢Ĩ":1620,"ä¿ĿæĮģ":1621,"ences":1622,"åıª":1623,"èĩ´":1624,"æĪı":1625,"Ġmental":1626,"Ġfew":1627,"ates":1628,"è¿ĩç¨ĭ":1629,"å®īåħ¨":1630,"Ġsustain":1631,"Ġwere":1632,"太":1633,"çĮ":1634,"Ġspecific":1635,"Ġworld":1636,"çŃĶ":1637,"```":1638,"Ġtake":1639,"åħ»":1640,"éĢŁ":1641,"ever":1642,"SS":1643,"éĶĢ":1644,"Ġbo":1645,"hes":1646,"Ġmus":1647,"æľįåĬ¡":1648,"è§Ĵ":1649,"ten":1650,"æŀIJ":1651,"pow":1652,"dict":1653,"vent":1654,"10":1655,"çļĦæĹ":1656,"ĸçķ":1657,"Ġprot":1658,"ç½®":1659,"Ġhigh":1660,"Ġbus":1661,"Ġindust":1662,"åIJ¦":1663,"cial":1664,"人们":1665,"ĠAs":1666,"åijĬ":1667,"ade":1668,"æĶ¹":1669,"çĹ":1670,"Ġhad":1671,"Ġher":1672,"Ġjust":1673,"ï¼Ľ":1674,"è´Ń":1675,"第":1676,"éĵ":1677,"Ġwater":1678,"Ġfood":1679,"éĺŁ":1680,"aus":1681,"Ġchalleng":1682,"åħį":1683,"æĸĩåĮĸ":1684,"Ġmost":1685,"é¸":1686,"ç½ij":1687,"缴":1688,"Ġsm":1689,"Ġactiv":1690,"ploy":1691,"Overall":1692,"å¿«":1693,"ruct":1694,"Ġindividuals":1695,"å§ĭ":1696,"gies":1697,"æŁ¥":1698,"çα":1699,"iety":1700,"In":1701,"åĪĨæŀIJ":1702,"è§Ĩ":1703,"温":1704,"ç»´":1705,"olut":1706,"åŁŁ":1707,"ommend":1708,"Ġcomple":1709,"æķĻ":1710,"Ġbu":1711,"Ġeducation":1712,"ather":1713,"Ġ4":1714,"ting":1715,"Ġfind":1716,"没":1717,"Ġhis":1718,"ä¹ĭéĹ´":1719,"Ġeffective":1720,"Ġatt":1721,"Ġrese":1722,"èĥ½åĬĽ":1723,"åŁİ":1724,"Ġallow":1725,"Ġav":1726,"Ġpromot":1727,"æĻºèĥ½":1728,"满":1729,"åħ±":1730,"iew":1
731,"come":1732,"ç³»ç»Ł":1733,"Ġrespons":1734,"äºĴ":1735,"Ġcult":1736,"powered":1737,"Ġrecommend":1738,"èIJ¥":1739,"OSS":1740,"Ġchange":1741,"è¯ģ":1742,"ved":1743,"æİĴ":1744,"è§£åĨ³":1745,"ici":1746,"ĠHow":1747,"Ġfeel":1748,"æľĪ":1749,"Ġwhat":1750,"以åıĬ":1751,"Ġsee":1752,"åŃ©":1753,"bs":1754,"Ġsur":1755,"æ£":1756,"ality":1757,"Ġvis":1758,"ç¡®ä¿Ŀ":1759,"pect":1760,"å®ŀçݰ":1761,"Ġcare":1762,"广":1763,"ills":1764,"åºŃ":1765,"ases":1766,"å¤į":1767,"åºĶç͍":1768,"çļĦæĥ":1769,"ards":1770,"Ġaddress":1771,"Ġcompan":1772,"Ġinvol":1773,"Ġcustomer":1774,"åĽłä¸º":1775,"Ġstudents":1776,"Ġins":1777,"注æĦı":1778,"æŀĦ":1779,"欢":1780,"æµ·":1781,"åıĤ":1782,"èĩªçĦ¶":1783,"é©":1784,"ĠThese":1785,"wn":1786,"æĺĵ":1787,"çĬ¶":1788,"ren":1789,"Ġtreat":1790,"Ġbenefits":1791,"ĊĠĠĠĠĠĠĠ":1792,"对äºİ":1793,"æĢĿ":1794,"ider":1795,"ĠYes":1796,"ĠK":1797,"åĸľ":1798,"Ġke":1799,"Ġeng":1800,"Ġpop":1801,"ost":1802,"pare":1803,"Ġmon":1804,"款":1805,"ĠMOSS":1806,"Ġemot":1807,"Ġac":1808,"ç¼ĸ":1809,"fore":1810,"åı¥":1811,"Ġval":1812,"ily":1813,"Ġiss":1814,"èĤī":1815,"èĩ³":1816,"游æĪı":1817,"ween":1818,"Ġinclude":1819,"Ġprotect":1820,"åħ³ç³»":1821,"éĻ©":1822,"Ġsever":1823,"Ġthan":1824,"éľĢæ±Ĥ":1825,"ç»ĥ":1826,"ĠThey":1827,"iss":1828,"ys":1829,"Ġjob":1830,"éĺ³":1831,"æIJ":1832,"Ġbetween":1833,"Ġmach":1834,"--------":1835,"èĢĥèĻij":1836,"è´¨éĩı":1837,"Ġbusiness":1838,"wor":1839,"ick":1840,"eg":1841,"åħħ":1842,"ç¯":1843,"æĿ¡":1844,"ner":1845,"apt":1846,"Ġappro":1847,"Ġplay":1848,"没æľī":1849,"¤IJ":1850,"æľª":1851,"æĪĺ":1852,"å®¶åºŃ":1853,"ãĢĭ":1854,"ency":1855,"ĠCh":1856,"ãĢĬ":1857,"Ġproviding":1858,"Ġresources":1859,"âĢĻ":1860,"Ġassist":1861,"Ġnatural":1862,"è¯Ħ":1863,"便":1864,"Ġsaf":1865,"åħ·æľī":1866,"è°¢":1867,"çĥŃ":1868,"ss":1869,"eth":1870,"old":1871,"Ġperform":1872,"Ġseveral":1873,"é¤IJ":1874,"Ġeach":1875,"转":1876,"ci":1877,"Ġty":1878,"Ġpub":1879,"æ´»åĬ¨":1880,"ocus":1881,"çīĮ":1882,"è¶Ĭ":1883,"åĽ¢":1884,"è½»":1885,"è¯Ńè¨Ģ":1886,"Ġareas":1887,"éĩĩ":1888,"ft":1889,"riend":1890,"å·²":1891,"å¸Ĥåľº":1892,"ition":18
93,"ients":1894,"管çIJĨ":1895,"许":1896,"人类":1897,"身ä½ĵ":1898,"ique":1899,"Ġpartic":1900,"ç»Ń":1901,"agement":1902,"ves":1903,"符":1904,"line":1905,"红":1906,"åIJ¸":1907,"Ġpatter":1908,"000":1909,"社ä¼ļ":1910,"åĨħ容":1911,"Ġorganiz":1912,"ough":1913,"Ġve":1914,"åŃ©åŃIJ":1915,"æĸ½":1916,"æ¤į":1917,"åĩł":1918,"ä½Ĩæĺ¯":1919,"Ġaff":1920,"Ġnum":1921,"lement":1922,"èīº":1923,"èij":1924,"Ġcar":1925,"ages":1926,"abor":1927,"æĺ¯ä¸Ģç§į":1928,"Ġinst":1929,"èĽ":1930,"ä¹ĭä¸Ģ":1931,"è·¯":1932,"åį³":1933,"Ġmain":1934,"éļı":1935,"How":1936,"å¿ħ":1937,"ç¨ĭåºı":1938,"éŁ³ä¹IJ":1939,"red":1940,"æ²¹":1941,"Ġoffer":1942,"ets":1943,"ç¢":1944,"Ġduring":1945,"çļĦ人":1946,"æĽ´å¤ļ":1947,"Ġdi":1948,"代çłģ":1949,"èİ·":1950,"åħĭ":1951,"Ġguid":1952,"主è¦ģ":1953,"Ġfam":1954,"æİ§":1955,"éĢļ常":1956,"ĠAd":1957,"å¤ĦçIJĨ":1958,"urn":1959,"ower":1960,"åij½":1961,"æıı":1962,"Ġskills":1963,"Ġtool":1964,"ware":1965,"æĸĩæľ¬":1966,"Ġpatterns":1967,"缮æłĩ":1968,"acy":1969,"æīĵ":1970,"åŁİå¸Ĥ":1971,"Ġevery":1972,"ries":1973,"读":1974,"éģ¿":1975,"çϽ":1976,"éĢĤåIJĪ":1977,"Ġpatient":1978,"羣":1979,"oth":1980,"她":1981,"åĶ®":1982,"ä¸Ģç§į":1983,"Ġmade":1984,"ä½İ":1985,"ise":1986,"Ġrem":1987,"æ¶Ī":1988,"åIJ«":1989,"air":1990,"Ġgener":1991,"oy":1992,"ç²¾":1993,"æĥħåĨµ":1994,"ights":1995,"Ġexpl":1996,"è§ģ":1997,"Ġpredict":1998,"ç±³":1999,"æĽ´å¥½":2000,"ä¿®":2001,"Ġclimate":2002,"Ġfocus":2003,"Ġgrow":2004,"客æĪ·":2005,"ä¸įæĸŃ":2006,"itor":2007,"ĠEn":2008,"约":2009,"æĺ¯åIJ¦":2010,"ä»ħ":2011,"æĪij们çļĦ":2012,"æľĽ":2013,"op":2014,"Ġmaking":2015,"yth":2016,"ccess":2017,"Ġown":2018,"ggest":2019,"Ġtas":2020,"uture":2021,"Ġmodel":2022,"put":2023,"Ġresearch":2024,"erest":2025,"éļ¾":2026,"Ġ[":2027,"iel":2028,"ational":2029,"Ġcommunic":2030,"ç¥ŀ":2031,"ç©¶":2032,"Ġrest":2033,"æĪIJ为":2034,"king":2035,"pr":2036,"åĮ»":2037,"cur":2038,"èĤ²":2039,"Ġ'":2040,"è¿Ļç§į":2041,"ç¯ĩ":2042,"Ġche":2043,"own":2044,"éĻħ":2045,"Ġfin":2046,"åĪ¶ä½ľ":2047,"Ġsuggest":2048,"å¢ŀåĬł":2049,"Ġmedia":2050,"ribut":2051,"çļĦæĥħ":2052,"åĬłåħ¥":2053,"Ġcle":2054,"åij¨":2055,"竳"
:2056,"Ġthink":2057,"Ġlocal":2058,"pportun":2059,"ĠYou":2060,"Ġplan":2061,"Ġeven":2062,"éĽĨ":2063,"å·§":2064,"ax":2065,"Ġchallenges":2066,"Ġprof":2067,"ĠCan":2068,"Ġconcer":2069,"Ġfuture":2070,"åĬ¿":2071,"Ġref":2072,"èģĶ":2073,"Ġself":2074,"æĪĸèĢħ":2075,"ble":2076,"åĽ´":2077,"è¿IJåĬ¨":2078,"Ġinf":2079,"éĩĬ":2080,"Ġsustainable":2081,"Ġtext":2082,"Ġgra":2083,"äºĮ":2084,"åĵģçīĮ":2085,"ä¸įåIJĮçļĦ":2086,"led":2087,"çĭ¬":2088,"Ġopportun":2089,"Ġcontin":2090,"ym":2091,"Ġget":2092,"å¯Ĩ":2093,"éϤ":2094,"æħ":2095,"éģ¿åħį":2096,"Ġ+":2097,"è§ī":2098,"Ġret":2099,"å¸ĥ":2100,"Ġinterest":2101,"Ġsociety":2102,"ç»ĵæŀľ":2103,"åIJ¬":2104,"é¦ĸåħĪ":2105,"Ġbre":2106,"Ġ20":2107,"ĠHowever":2108,"è®°":2109,"ons":2110,"è¿ij":2111,"å¼Ģå§ĭ":2112,"Ġbuild":2113,"Ġbeh":2114,"'m":2115,"vers":2116,"Ġgood":2117,"çIJĨè§£":2118,"resent":2119,"离":2120,"åĬŁèĥ½":2121,"Ġeffort":2122,"labor":2123,"é»ij":2124,"Ġbetter":2125,"Ġread":2126,"å¾ĭ":2127,"èĽĭ":2128,"hed":2129,"ä¹°":2130,"导èĩ´":2131,"Ġimplement":2132,"ç¿":2133,"享":2134,"头":2135,"ense":2136,"Ġlong":2137,"other":2138,"饮":2139,"åŃĺåľ¨":2140,"çļĦæĦ":2141,"ä¸Ģ份":2142,"ython":2143,"ning":2144,"åĩıå°ij":2145,"åĢĻ":2146,"ä¸ĵ":2147,"åIJĦç§į":2148,"èħ":2149,"å°½":2150,"åįĩ":2151,"æĬ¥":2152,"Ġpublic":2153,"Ġlar":2154,"ä½łçļĦ":2155,"aut":2156,"é¢ĨåŁŁ":2157,"æļ":2158,"ollow":2159,"èģĮ":2160,"Ġchang":2161,"Ġbest":2162,"hip":2163,"åĨį":2164,"akes":2165,"Ġchat":2166,"ited":2167,"Ġpower":2168,"ä¿ĿæĬ¤":2169,"书":2170,"计åĪĴ":2171,"éĩįè¦ģçļĦ":2172,"åıĺåĮĸ":2173,"ilities":2174,"Ġconsider":2175,"æĪij们åı¯ä»¥":2176,"éĤ£ä¹Ī":2177,"Ġide":2178,"æ¼Ķ":2179,"aging":2180,"Ġbased":2181,"å®Ŀ":2182,"Ġrange":2183,"Ġresult":2184,"Ġmem":2185,"çħ§":2186,"Ġlevel":2187,"cou":2188,"Ġbr":2189,"Th":2190,"ä¼ģ":2191,"建ç«ĭ":2192,"Ġunique":2193,"è®Ń":2194,"Ġmark":2195,"许å¤ļ":2196,"è¡Į为":2197,"Ķç©¶":2198,"çļĦæĬ":2199,"Ġset":2200,"骤":2201,"ts":2202,"Ġhist":2203,"Ġaround":2204,"Ġrev":2205,"åħ¶ä¸Ń":2206,"ï¼ģ":2207,"æııè¿°":2208,"æľĢåIJİ":2209,"Ġsim":2210,"nect":2211,"åĽŀçŃĶ":2212,"éĺ²":2213,"èī¯":221
4,"åΰäºĨ":2215,"ä¸ĸçķ":2216,"æĸ¹æ¡Ī":2217,"æĿIJæĸĻ":2218,"ä¸ĸçķĮ":2219,"æĽ´å¥½åľ°":2220,"两个":2221,"Ġemploy":2222,"Ġtry":2223,"æĵ":2224,"Ġback":2225,"åĪĩ":2226,"Ġsuccess":2227,"Ġdecisions":2228,"Ġthose":2229,"å¯Į":2230,"Ġfact":2231,"æİ¢":2232,"è¶£":2233,"Ġpractices":2234,"åIJĹ":2235,"æīį":2236,"çİ©":2237,"ption":2238,"æĸĩ竳":2239,"Ġfeat":2240,"Ġprevent":2241,"Ġwriting":2242,"çļĦæĢ":2243,"Ġno":2244,"ä»ĭ":2245,"éŨ":2246,"Ġdel":2247,"æĴ":2248,"Ġoptim":2249,"ination":2250,"ĠĊ":2251,"usion":2252,"Ġaccount":2253,"ling":2254,"Ġdivers":2255,".\"":2256,"ath":2257,"èĭ±":2258,"ä¼ģä¸ļ":2259,"Ġgrou":2260,"åľ°çIJĥ":2261,"失":2262,"Ġpersonalized":2263,"ĠHe":2264,"表达":2265,"curity":2266,"Ġfollow":2267,"产çĶŁ":2268,"Ġear":2269,"åİĭ":2270,"vern":2271,"Ġissues":2272,"åĿĩ":2273,"é²":2274,"Ġdr":2275,"iving":2276,"Ġtraining":2277,"Ġrisk":2278,"åĩ½":2279,"åı²":2280,"æij":2281,"çļĦæĹ¶":2282,"ogn":2283,"Ġrequire":2284,"Ġenvironmental":2285,"back":2286,"éĶ®":2287,"çĸĹ":2288,"Ġinteract":2289,"åĽ¢éĺŁ":2290,"æ¯ı个":2291,"çĦ¶åIJİ":2292,"Ġdist":2293,"ç͍äºİ":2294,"认为":2295,"åĩ½æķ°":2296,"Ġsent":2297,"ĊĠĠĠĠĠĠĠĠ":2298,"Ġreducing":2299,"å¹²":2300,"Ġrep":2301,"Ġcaus":2302,"Ġmusic":2303,"çª":2304,"Ġmonitor":2305,"Ġform":2306,"é¢ľ":2307,"çĹħ":2308,"é¦Ļ":2309,"Ġoften":2310,"åı¯èĥ½ä¼ļ":2311,"åijĺå·¥":2312,"Ġhand":2313,"æĬķ":2314,"Ġneeds":2315,"æŃ¤å¤ĸ":2316,"åıĭ":2317,"ivity":2318,"Ġactivities":2319,"åĸľæ¬¢":2320,"Ġpur":2321,"ian":2322,"self":2323,"åĬ¨çī©":2324,"comes":2325,"å©":2326,"Ġpriv":2327,"az":2328,"Ġrelations":2329,"Ġmachine":2330,"çļĦæ°":2331,"ä»·æł¼":2332,"ä»·å̼":2333,"ç´¢":2334,"Ġfeed":2335,"ä¸Ģä¸ĭ":2336,"Ġteam":2337,"Ġindustry":2338,"è´¢":2339,"ĠPro":2340,"Ġwant":2341,"ç§°":2342,"Ġclass":2343,"Ġlove":2344,"åħ³äºİ":2345,"è¾ĵåħ¥":2346,"Ġtransport":2347,"Ġcomplex":2348,"Ġyear":2349,"éĶĢåĶ®":2350,"寻":2351,"ience":2352,"ists":2353,"æĶ¯æĮģ":2354,"Ġmind":2355,"Ġfun":2356,"Ġchar":2357,"æĮī":2358,"Ġconcerns":2359,"conom":2360,"ç®Ģåįķ":2361,"以ä¸ĭæĺ¯":2362,"Ġstart":2363,"å¹¶ä¸Ķ":2364,"avi":2365,"ä¸ŃåĽ½":2366,
"åħĥç´ł":2367,"Ġconf":2368,"Ġpositive":2369,"Ġcur":2370,"Ġcount":2371,"ery":2372,"å¡":2373,"室":2374,"Ġcost":2375,"Ġequ":2376,"Ġpolic":2377,"aste":2378,"aw":2379,"éħĴ":2380,"coura":2381,"iven":2382,"place":2383,"chie":2384,"çļĦæķ°":2385,"åĽłç´ł":2386,"Ġfl":2387,"ism":2388,"Ġmedical":2389,"Ġhumans":2390,"Ġautom":2391,"ertainly":2392,"Ġ0":2393,"Ġoffers":2394,"Ġdetect":2395,"Ġ6":2396,"é£İæł¼":2397,"Ġshow":2398,"çģ«":2399,"Ġanim":2400,"é¢ľèī²":2401,"lease":2402,"ave":2403,"åĵª":2404,"ĠThere":2405,"以ä¸Ĭ":2406,"æľªæĿ¥":2407,"XX":2408,"çīĩ":2409,"uch":2410,"Ġtasks":2411,"åħ·ä½ĵ":2412,"æ¤įçī©":2413,"Ġmin":2414,"èīºæľ¯":2415,"icult":2416,"Ġexperiences":2417,"æİ§åζ":2418,"be":2419,"Ġpatients":2420,"å²":2421,"ĠWe":2422,"Ġrecogn":2423,"çĥ¤":2424,"Ġsmall":2425,"åĿĹ":2426,"åĦ":2427,"太éĺ³":2428,"ction":2429,"Ġent":2430,"æį¢":2431,"Ġbefore":2432,"Ġbecome":2433,"å·²ç»ı":2434,"表çݰ":2435,"Ġexplo":2436,"Ġachie":2437,"ä»»åĬ¡":2438,"大çļĦ":2439,"Ġday":2440,"Ġfound":2441,"å±±":2442,"ond":2443,"Ġtreatment":2444,"pend":2445,"hen":2446,"Ġcondit":2447,"ç¡®å®ļ":2448,"Ġbusinesses":2449,"ĠWh":2450,"æīĢæľī":2451,"Ġdeveloped":2452,"ç»Ī":2453,"æŃ¥éª¤":2454,"Ġdifficult":2455,"åı·":2456,"ĠRe":2457,"éĶĻ":2458,"Ġcho":2459,"Ġquest":2460,"Ġtranspare":2461,"Ġproject":2462,"Ġcommunity":2463,"ov":2464,"å¸Ī":2465,"å¼ł":2466,"åĪĨç±»":2467,"人çļĦ":2468,"sis":2469,"çĽĬ":2470,"oid":2471,"ĠAn":2472,"ways":2473,"Ġeas":2474,"Ġaffect":2475,"Ġothers":2476,"Ġregul":2477,"æĢ§åĴĮ":2478,"åĸĦ":2479,"agn":2480,"ä½ľä¸º":2481,"åı¯ä»¥å¸®åĬ©":2482,"åĦ¿":2483,"Ġorganizations":2484,"鸡":2485,"åħ´":2486,"Ġfriend":2487,"Ġ$":2488,"Ġdetail":2489,"Ġtraditional":2490,"Ġdesigned":2491,"è´Ńä¹°":2492,"ä½ĵéªĮ":2493,"ç»į":2494,"erm":2495,"Ġconnect":2496,"è¿Ļæł·":2497,"Ġrecommendations":2498,"Ġboth":2499,"ŁéĢļ":2500,"æ¯į":2501,"Ġsit":2502,"ä½ľç͍":2503,"ä»ĭç»į":2504,"Ġste":2505,"ĠSure":2506,"åı°":2507,"æĤ¨çļĦ":2508,"Ġshe":2509,"Ġmanagement":2510,"joy":2511,"è´Ł":2512,"Ġpromote":2513,"Ġvarious":2514,"(\"":2515,"por":2516,"Ġsens":2517,"Ġessential"
:2518,"gether":2519,"ularly":2520,"äºī":2521,"irst":2522,"Ġop":2523,"Ġspecies":2524,"çİ°åľ¨":2525,"cho":2526,"Ġbehavi":2527,"çŃij":2528,"女":2529,"Ġquality":2530,"Ġext":2531,"è¥":2532,"å®ĮæĪIJ":2533,"æĢ»ä¹ĭ":2534,"éĥ¨åĪĨ":2535,"ä»İèĢĮ":2536,"åĽ¾":2537,"Ġtyp":2538,"Ġstrate":2539,"西":2540,"Ġhere":2541,"ars":2542,"å¸Į":2543,"çļĦæĿ":2544,"å°Ŀ":2545,"ee":2546,"ier":2547,"Ġec":2548,"ically":2549,"ering":2550,"念":2551,"ĠDe":2552,"Ġneg":2553,"建çŃij":2554,"Ġservices":2555,"Ġable":2556,"imes":2557,"Ġoptions":2558,"缸åħ³":2559,"Ġsub":2560,"Ġdecision":2561,"ĠCertainly":2562,"Ġåľ¨":2563,"æ¢":2564,"Ġservice":2565,"):":2566,"带æĿ¥":2567,"Ġchild":2568,"è§£éĩĬ":2569,"irt":2570,"çĨ":2571,"ä¸įä»ħ":2572,"æĿ¾":2573,"积æŀģ":2574,"ron":2575,"åı¤":2576,"çłĶç©¶":2577,"ç²ī":2578,"hor":2579,"Ġprofess":2580,"çļĦéĹ®é¢ĺ":2581,"Ġopportunities":2582,"åİĨåı²":2583,"Ġdef":2584,"ĠAm":2585,"Ġgr":2586,"aur":2587,"å±Ĥ":2588,"çŃĸ":2589,"Ġpopular":2590,"æ´ģ":2591,"åıijçݰ":2592,"Ġpoem":2593,"èµĽ":2594,"Ġob":2595,"Ġdon":2596,"Ġsound":2597,"Ġtransportation":2598,"ious":2599,"åı¦":2600,"Ġrole":2601,"Ġfiel":2602,"ç§ijåѦ":2603,"èĢģ":2604,"reen":2605,"æľīæķĪ":2606,"Ġcor":2607,"Ġfeedback":2608,"Ġtechnologies":2609,"交éĢļ":2610,"Ġadapt":2611,"'re":2612,"ervation":2613,"Ġcommunities":2614,"çݰ代":2615,"Ġlook":2616,"Ġfac":2617,"ç͵影":2618,"Ġcollect":2619,"å¾Ĺåΰ":2620,"hips":2621,"Ġavail":2622,"eren":2623,"ä¸Ģèµ·":2624,"çīĽ":2625,"Ġposs":2626,"Ġweather":2627,"Ġefforts":2628,"¿Ģ":2629,"æĹħ":2630,"oh":2631,"Ġcollabor":2632,"æĭ¥":2633,"æĪIJåĬŁ":2634,"èİ·å¾Ĺ":2635,"å±ħ":2636,"Ġtre":2637,"Ġsources":2638,"Ġstudy":2639,"Ġprograms":2640,"éĻIJ":2641,"Ġtips":2642,"Ġmarket":2643,"ally":2644,"害":2645,"wards":2646,"æ£Ģ":2647,"ä¸Ģç¯ĩ":2648,"rior":2649,"Ġtop":2650,"Ġend":2651,"åĭ":2652,"Ġlarge":2653,"iciency":2654,"Ġdec":2655,"å®ļçļĦ":2656,"icient":2657,"è¿ĩç¨ĭä¸Ń":2658,"lications":2659,"缺":2660,"Ġtour":2661,"Ġtogether":2662,"人工":2663,"Ġtools":2664,"æĸ¯":2665,"æ°ij":2666,"æĬĬ":2667,"ä¹ĭéĹ´çļĦ":2668,"çī¹çĤ¹":2669,"Ġbel":2670,"ditionally":26
71,"åĪ©ç͍":2672,"è¾¹":2673,"éĻį":2674,"ĠIf":2675,"é¢Ŀ":2676,"åįı":2677,"å¾Ģ":2678,"lish":2679,"è¯ī":2680,"ins":2681,"奶":2682,"Ġeconom":2683,"Ġinvest":2684,"ĠDo":2685,"tain":2686,"åĩºçݰ":2687,"çļĦå½±åĵį":2688,"aterial":2689,"Ġsure":2690,"Ġpass":2691,"çĶ»":2692,"è´£":2693,"ç»ĵæŀĦ":2694,"æķħ":2695,"æĥħæĦŁ":2696,"æ¿Ģ":2697,"ellig":2698,"ä¼Ĺ":2699,"æ¯Ķè¾ĥ":2700,"tern":2701,"Ġoutcomes":2702,"up":2703,"Ġbeaut":2704,"read":2705,"çĶŁæĪIJ":2706,"æķ°åŃĹ":2707,"Ġdem":2708,"ires":2709,"åı¯ä»¥éĢļè¿ĩ":2710,"æĸ°çļĦ":2711,"Ġdeep":2712,"å¨":2713,"çĭĹ":2714,"åħ³æ³¨":2715,"çĶŁåij½":2716,"ä¼łç»Ł":2717,"Ġstay":2718,"æŃĮ":2719,"åħ³éĶ®":2720,"Ġplace":2721,"主é¢ĺ":2722,"å¾Īå¤ļ":2723,"èĪĴ":2724,"Ġprofessional":2725,"yle":2726,"æĽ²":2727,"19":2728,"Ġessay":2729,"Ġgive":2730,"ç³ĸ":2731,"Ġonly":2732,"æŁIJ":2733,"Ġphys":2734,"对è¯Ŀ":2735,"Ġcontro":2736,"Ġamount":2737,"cept":2738,"ization":2739,"ç¼ĸåĨĻ":2740,"åıĹåΰ":2741,"Ġalways":2742,"æ¯Ķå¦Ĥ":2743,"Ġprivacy":2744,"au":2745,"________":2746,"Ġresponsible":2747,"()":2748,"çŃīçŃī":2749,"Ġmaterial":2750,"Ġonline":2751,"é¼":2752,"æĶ¿":2753,"åĽĽ":2754,"Ġenjoy":2755,"åľŁ":2756,"Ġsafety":2757,"Ġtw":2758,"Ġcommunication":2759,"丽":2760,"æĺ¾":2761,"olution":2762,"erg":2763,"įä½ľ":2764,"Ġuser":2765,"Ġemotional":2766,"time":2767,"é¾":2768,"Ġsecurity":2769,"Ġsense":2770,"elines":2771,"åĬ±":2772,"çī©è´¨":2773,"ura":2774,"Ġshare":2775,"Ġanalyzing":2776,"ital":2777,"é±":2778,"irtual":2779,"Ġvisit":2780,"bers":2781,"Ġcour":2782,"Ġproble":2783,"设å¤ĩ":2784,"atch":2785,"land":2786,"é±¼":2787,"æĪij们éľĢè¦ģ":2788,"稳":2789,"ibility":2790,"Ġefficiency":2791,"声":2792,"èĴ":2793,"æľºåύ":2794,"Ġclear":2795,"åζå®ļ":2796,"izing":2797,"Ġconditions":2798,"lusion":2799,"Ġlow":2800,"Ġlim":2801,"hers":2802,"Ġrisks":2803,"ç¿»":2804,"Ġlet":2805,"åĴĸ":2806,"å¿ĥçIJĨ":2807,"è¿ľ":2808,"print":2809,"Ġchanges":2810,"Ġmeas":2811,"Ġimproving":2812,"Ġcrit":2813,"50":2814,"å¸ĮæľĽ":2815,"Ġaud":2816,"åįĹ":2817,"æĹłæ³ķ":2818,"Ġnegative":2819,"é¡¹çĽ®":2820,"und":2821,"ats":2822,"Ġcompanies":2823,"æī
¾åΰ":2824,"Ġcontribut":2825,"æŃ£ç¡®":2826,"é»Ħ":2827,"å±ŀ":2828,"Ġunderstanding":2829,"Ġmult":2830,"Ġclo":2831,"å¾ģ":2832,"Ġprior":2833,"rim":2834,"人工æĻºèĥ½":2835,"Ġvariety":2836,"Ġtaking":2837,"åĤ":2838,"aster":2839,"ody":2840,"Ġ{":2841,"çļĦéĩįè¦ģ":2842,"Ġfore":2843,"èµĦæºIJ":2844,"è¦ģæ±Ĥ":2845,"Ġfeatures":2846,"èįī":2847,"me":2848,"èĮĥ":2849,"Ġoper":2850,"级":2851,"é²ľ":2852,"æĬĢå·§":2853,"ijæĪĺ":2854,"ç±»åŀĭ":2855,"æĿ¿":2856,"软":2857,"ew":2858,"Ġrestaur":2859,"Ġwithout":2860,"ructure":2861,"çļĦæĺ¯":2862,"çı":2863,"Ġlist":2864,"urate":2865,"Ġbook":2866,"亲":2867,"åºĹ":2868,"ä¹Łæĺ¯":2869,"ä»»ä½ķ":2870,"Ġcam":2871,"ĠBe":2872,"Ġgovern":2873,"Ġbehavior":2874,"è®Ńç»ĥ":2875,"Ġfamily":2876,"æĿĤ":2877,"Ġcity":2878,"Ġapproach":2879,"Ġaccurate":2880,"Ġsom":2881,"Ġel":2882,"èĪŀ":2883,"èŀ":2884,"åŁºæľ¬":2885,"Ġdise":2886,"Ġencoura":2887,"ĠWhat":2888,"åĥ":2889,"详":2890,"¦Ĥ":2891,"å·¥åħ·":2892,"åķ¡":2893,"Ġstill":2894,"chool":2895,"æĦŁåΰ":2896,"çĶŁçī©":2897,"åĴĸåķ¡":2898,"åĩĨå¤ĩ":2899,"Ġwaste":2900,"Ġevents":2901,"æķĻèĤ²":2902,"Ġ8":2903,"Ġmust":2904,"ied":2905,"asing":2906,"å½¢æĪIJ":2907,"Ġproducts":2908,"åħ¸":2909,"讲":2910,"fter":2911,"å·®":2912,"less":2913,"Ġcro":2914,"Ġfinan":2915,"åıįåºĶ":2916,"åĪĽéĢł":2917,"Ġguidelines":2918,"åΤ":2919,"ä½ľåĵģ":2920,"表示":2921,"å¼Ĥ":2922,"Ġknown":2923,"Ġtest":2924,"误":2925,"ope":2926,"Ġusers":2927,"AI":2928,"å¾·":2929,"new":2930,"追":2931,"iques":2932,"模åŀĭ":2933,"åĬĽåĴĮ":2934,"Ġhistory":2935,"ĠAl":2936,"æĬķèµĦ":2937,"å°Ŀè¯ķ":2938,"ank":2939,"Ġhome":2940,"éĴŁ":2941,"丰":2942,"èĪĴéĢĤ":2943,"Ġincrease":2944,"Ġhab":2945,"åĪ»":2946,"è¾ĵåĩº":2947,"Ġleading":2948,"Ġ7":2949,"é£İéĻ©":2950,"Ġperformance":2951,"Ġhapp":2952,"åŃ£":2953,"Ġstand":2954,"ty":2955,"ç¦ı":2956,"Ġcustomers":2957,"åįİ":2958,"Ġbelie":2959,"Ġcompany":2960,"å½ķ":2961,"é£Łçī©":2962,"ĠUn":2963,"Ġsumm":2964,"rent":2965,"ĠCon":2966,"éĢĤéĩı":2967,"anced":2968,"Ġi":2969,"Ġlight":2970,"Ġanalysis":2971,"å°Ĭ":2972,"ĠUse":2973,"ouse":2974,"ted":2975,"Ġcharact":2976,"Ġ#":2977,"to":2978,"绾":2979,"
ä¸įæĺ¯":2980,"Ġdeveloping":2981,"åŁ¹":2982,"Ġstrategies":2983,"Ġmight":2984,"çŁŃ":2985,"çļĦæİ":2986,"Ġfirst":2987,"èĥĮ":2988,"çĮ«":2989,"Ġincludes":2990,"åĽŃ":2991,"Ġdiagn":2992,"Ġgrowth":2993,"ä¸ĵä¸ļ":2994,"Ġdoes":2995,"12":2996,"绿":2997,"Ġkeep":2998,"详ç»Ĩ":2999,"åĥı":3000,"åıijçĶŁ":3001,"fact":3002,"åı¯ä»¥åľ¨":3003,"ç«Ļ":3004,"æĭī":3005,"æµİ":3006,"Ġchatbots":3007,"Ġbreak":3008,"è¡¡":3009,"çŁ³":3010,"æĮģç»Ń":3011,"life":3012,"Ġ10":3013,"æ´Ĺ":3014,"ĠAdditionally":3015,"士":3016,"ember":3017,"Ġgoals":3018,"å¾®":3019,"Ġview":3020,"·":3021,"ove":3022,"åŁºç¡":3023,"Ġoptimize":3024,"Ġtem":3025,"Ġdown":3026,"åŁºç¡Ģ":3027,"è¶ħ":3028,"ercis":3029,"Ġless":3030,"ees":3031,"æĿĥ":3032,"Ġkey":3033,"Ġworks":3034,"讨":3035,"åı¥åŃIJ":3036,"Ġrobot":3037,"uss":3038,"åħ¨çIJĥ":3039,"ç»ıæµİ":3040,"æīįèĥ½":3041,"egr":3042,"ä»ĸ们çļĦ":3043,"äºĶ":3044,"èµ·æĿ¥":3045,"çĵ":3046,"Ġfactors":3047,"Ġcultural":3048,"æľ¨":3049,"Ġworking":3050,"ä¼¼":3051,"èIJ½":3052,"éĢŁåº¦":3053,"ä½ı":3054,"Ġeffects":3055,"å©ļ":3056,"br":3057,"åİħ":3058,"rain":3059,"\")":3060,"åѦçĶŁ":3061,"\",":3062,"Ġpar":3063,"atform":3064,"Ġensuring":3065,"çͱäºİ":3066,"Ġmuch":3067,"Ġwords":3068,"Ġmar":3069,"ç»ıéªĮ":3070,"为äºĨ":3071,"åIJĪä½ľ":3072,"ven":3073,"Ġ/":3074,"Ġfinancial":3075,"work":3076,"ories":3077,"æ²»":3078,"Ġtechniques":3079,"æĭ¥æľī":3080,"rap":3081,"å°Ķ":3082,"Ġest":3083,"Ġavailable":3084,"Ġlit":3085,"æ¹":3086,"Ġefficient":3087,"els":3088,"over":3089,"Ġland":3090,"Ġarea":3091,"Ġintellig":3092,"Ġpref":3093,"ature":3094,"çŁ¥è¯Ĩ":3095,"æĵįä½ľ":3096,"å¾ħ":3097,"igate":3098,"çļĦæĶ":3099,"Ġmean":3100,"bo":3101,"Ġcontrol":3102,"éĩĩç͍":3103,"ricult":3104,"Ġprogramm":3105,"Ġtowards":3106,"thing":3107,"ä¸įè¦ģ":3108,"Ġthough":3109,"彩":3110,"Ġcertain":3111,"Ġwild":3112,"ä»Ĭ":3113,"Ġconservation":3114,"çŁ¥éģĵ":3115,"Ġreally":3116,"çļĦåľ°":3117,"io":3118,"饰":3119,"Ġful":3120,"çݯä¿Ŀ":3121,"Ġexplore":3122,"çļĦæ¸":3123,"Ġdiverse":3124,"åĬłå¼º":3125,"çļ®":3126,"Ġemotions":3127,"Ġavoid":3128,"'ll":3129,"çļĦæī":3130,"åį¡":3131,"Ġplatfor
m":3132,"ances":3133,"Ġsitu":3134,"ä»ĺ":3135,"ä½įç½®":3136,"oring":3137,"çĽIJ":3138,"ä¸ĩ":3139,"Ġdev":3140,"nov":3141,"ash":3142,"Ġtwo":3143,"å®ł":3144,"bon":3145,"èµ°":3146,"åĪĹ表":3147,"Ġcy":3148,"èįIJ":3149,"ĠSome":3150,"Ġexplain":3151,"Ġaware":3152,"社交":3153,"day":3154,"åıĮ":3155,"æ²ŁéĢļ":3156,"æ°§":3157,"å¼Ģåıij":3158,"åħ¬åı¸çļĦ":3159,"Ġair":3160,"åĩ»":3161,"aring":3162,"éĥ½æĺ¯":3163,"Ġlevels":3164,"ods":3165,"Ġsteps":3166,"Ġcap":3167,"æ´ŀ":3168,"马":3169,"Ġreturn":3170,"Ġmet":3171,"çĶŁæĢģ":3172,"丰å¯Į":3173,"æŁĵ":3174,"æīĢ以":3175,"é¡»":3176,"Ġer":3177,"Ġfra":3178,"30":3179,"èĵ":3180,"âĢĶ":3181,"Ġå½ĵ":3182,"ah":3183,"ä¿ĥ":3184,"Ġlikely":3185,"ĠĠĠĠĠĠĠĠĠĠĠĠĠĠĠĠ":3186,"åĪĿ":3187,"Ġcreating":3188,"Ġfarm":3189,"Ġbal":3190,"Ġlives":3191,"å®ĥçļĦ":3192,"Ġability":3193,"ä¸ĬçļĦ":3194,"Ġsentence":3195,"åĤ¨":3196,"Ġrout":3197,"Ġprovides":3198,"Ġagain":3199,"å®łçī©":3200,"éĢIJ":3201,"Ġyears":3202,"èŀį":3203,"Ġphysical":3204,"Python":3205,"ĠEx":3206,"iting":3207,"è°ĥæķ´":3208,"ç½ij绾":3209,"æħ¢":3210,"空éĹ´":3211,"åĽ°":3212,"è±Ĩ":3213,"æĽ´å¤ļçļĦ":3214,"ĠAr":3215,"Ġmaintain":3216,"å®ŀéĻħ":3217,"Ġtravel":3218,"Ġsat":3219,"pro":3220,"ç͵åŃIJ":3221,"æ±½":3222,"ex":3223,"åģĩ":3224,"æIJŃ":3225,"éļıçĿĢ":3226,"è¿ĺæľī":3227,"礼":3228,"ale":3229,"Ġconsum":3230,"ĊĠ":3231,"ncy":3232,"Ġquestions":3233,"fort":3234,"making":3235,"Ġdesc":3236,"15":3237,"Ġinvolves":3238,"Ġstress":3239,"åŃĹ符":3240,"here":3241,"Ġimpacts":3242,"Ġexercis":3243,"åĿļ":3244,"ledge":3245,"ç§ijæĬĢ":3246,"oci":3247,"Ġeffectively":3248,"æ¶Īè´¹":3249,"Ġconclusion":3250,"éĺħ":3251,"Ġstre":3252,"issions":3253,"æ·»":3254,"It":3255,"éĿĻ":3256,"Ġvirtual":3257,"è¡£":3258,"Ġachieve":3259,"ource":3260,"è¿ŀ":3261,"acks":3262,"è¡¨æł¼":3263,"Ġimportance":3264,"èĩªæĪij":3265,"These":3266,"num":3267,"çļĦæł":3268,"Ġrelationships":3269,"Ġworkers":3270,"gical":3271,"orpor":3272,"erson":3273,"åij¢":3274,"nds":3275,"æİ¨èįIJ":3276,"ohn":3277,"å¿ħé¡»":3278,"容æĺĵ":3279,"ĠGo":3280,"Ġtell":3281,"ĠRes":3282,"onom":3283,"Ġbec":3284,"æ³Ľ":3285,"pos":32
86,"Ġmove":3287,"Ġstory":3288,"æŃ¢":3289,"Ġpriorit":3290,"Ġindustries":3291,"èľ":3292,"Ġpossible":3293,"ĠMan":3294,"Ġexpress":3295,"abilities":3296,"Ġintegr":3297,"代表":3298,"Ġrespond":3299,"åĪĨéĴŁ":3300,"æľºä¼ļ":3301,"Ġthings":3302,"交æµģ":3303,"Ġmeth":3304,"urther":3305,"Ġwide":3306,"èijĹ":3307,"æĪijçļĦ":3308,"ĸçķ¥":3309,"ides":3310,"ething":3311,"ĠWhile":3312,"pan":3313,"çŃĸçķ¥":3314,"Ġcent":3315,"Ġplease":3316,"ology":3317,"uracy":3318,"循":3319,"ward":3320,"nce":3321,"Ġthen":3322,"çªģ":3323,"å¥ĩ":3324,"Ġblo":3325,"ai":3326,"æŀĹ":3327,"ç®Ĺæ³ķ":3328,"综":3329,"Ġprint":3330,"aces":3331,"lu":3332,"ªæĸ½":3333,"pre":3334,"çļĦæĦı":3335,"Ġsol":3336,"Ġoverall":3337,"hold":3338,"Ġes":3339,"çļĦä¸Ģ":3340,"éģĩ":3341,"Ġpopul":3342,"å°ı说":3343,"æ³¢":3344,"åįģ":3345,"ä¹Łåı¯ä»¥":3346,"é£Łåĵģ":3347,"Ġcontent":3348,"å°Ħ":3349,"Ġrequires":3350,"æ£ĢæŁ¥":3351,"ĊĠĠĠĠĠĠĠĠĠĠĠ":3352,"Ġgroups":3353,"Ġfair":3354,"Ġbl":3355,"å®ŀéªĮ":3356,"æĮīçħ§":3357,"osp":3358,"str":3359,"ä¸įèĥ½":3360,"Ġharm":3361,"Ġprodu":3362,"çļĦæĬĢ":3363,"çĩ":3364,"tle":3365,"Ġanimals":3366,"è§Ĵèī²":3367,"lev":3368,"æ¸IJ":3369,"å¤įæĿĤ":3370,"Ġdepend":3371,"æĮijæĪĺ":3372,"åĮħåIJ«":3373,"Ġhelps":3374,"Ġopen":3375,"Ġnet":3376,"ĠĠĠĠĠ":3377,"Ġstrong":3378,"Ġjour":3379,"å¹¿æ³Ľ":3380,"æķ´ä¸ª":3381,"Ġelect":3382,"Ġresponse":3383,"åįķè¯į":3384,"æľĭ":3385,"Ġ<":3386,"åĮĸåѦ":3387,"éĴĪ":3388,"Ġquick":3389,"ually":3390,"Ġsomething":3391,"Ġtrack":3392,"度åĴĮ":3393,"erences":3394,"æłij":3395,"Ġaccuracy":3396,"Ġexc":3397,"é£ŀ":3398,"Ġfield":3399,"寻æī¾":3400,"éħ¸":3401,"Ġhope":3402,"çij":3403,"Ġinnov":3404,"绪":3405,"alk":3406,"Ġtypes":3407,"Ġdid":3408,"åĬª":3409,"Ġcall":3410,"è¯Ĺ":3411,"Ġearly":3412,"ĠOne":3413,"app":3414,"Ġcommon":3415,"æľĢç»Ī":3416,"Ġcheck":3417,"Ġsym":3418,"çĤĴ":3419,"æĬĢèĥ½":3420,"Ġenh":3421,"Ġagricult":3422,"Ġimm":3423,"ç»ĩ":3424,"满足":3425,"Ġschool":3426,"bal":3427,"Ġfollowing":3428,"based":3429,"Ġwebs":3430,"Ġculture":3431,"ĠCom":3432,"way":3433,"ä¸Ģå®ļ":3434,"åķĨåĵģ":3435,"ude":3436,"çļĦåıijå±ķ":3437,"çĶŁäº§":3438,"
osystem":3439,"Ġplant":3440,"åı¶":3441,"åIJĥ":3442,"ä»ĸçļĦ":3443,"der":3444,"询":3445,"å®¶åħ·":3446,"Ġfree":3447,"ç§»":3448,"æİĮ":3449,"Ġbody":3450,"Ġpresent":3451,"Ġparticularly":3452,"Ġchildren":3453,"Ġstudent":3454,").":3455,"çī¹å¾ģ":3456,"èĶ":3457,"éĺħ读":3458,"æķĪçİĩ":3459,"Ġprogram":3460,"éħ±":3461,"åıĺå¾Ĺ":3462,"ix":3463,"Ġcome":3464,"çļĦæ²":3465,"ĠTe":3466,"ĠTo":3467,"åħ±åIJĮ":3468,"Ġemployees":3469,"说æĺİ":3470,"Ġheart":3471,"Ġmot":3472,"æľĭåıĭ":3473,"eric":3474,"è¯ij":3475,"Ġcurrent":3476,"æĪIJæľ¬":3477,"Ġtoo":3478,"çݩ家":3479,"åĪĽæĸ°":3480,"Ġecosystem":3481,"常è§ģ":3482,"ä¸ĢæŃ¥":3483,"Ġpres":3484,"Ġmulti":3485,"åijĬè¯ī":3486,"严":3487,"Ġmit":3488,"Ġaction":3489,"çĨŁ":3490,"Ġhabit":3491,"åı£æĦŁ":3492,"ç®±":3493,"Ġuses":3494,"å¢ŀ强":3495,"ç»Ļåĩº":3496,"Ġ9":3497,"Ġdep":3498,"Ġeconomic":3499,"æĢ§çļĦ":3500,"18":3501,"åĨ°":3502,"Ġhelped":3503,"åIJ¸å¼ķ":3504,"çİĭ":3505,"Ġdiagnos":3506,"åł":3507,"èģĶç³»":3508,"群":3509,"ç»ĥä¹ł":3510,"æĪIJéķ¿":3511,"Ġpoint":3512,"å®ļæľŁ":3513,"åij¼":3514,"èį¯":3515,"æĿ¯":3516,"æ¤Ĵ":3517,"æķĪæŀľ":3518,"Ġspecial":3519,"æ··":3520,"åĩłä¸ª":3521,"ause":3522,"éĨ":3523,"æ¯ĶèµĽ":3524,"è·Ŀ":3525,"What":3526,"Ġtimes":3527,"icles":3528,"Ġ*":3529,"ç´§":3530,"å¦Ĥæŀľä½ł":3531,"çĭ¬çī¹":3532,"çģµ":3533,"ç¨İ":3534,"Ġcarbon":3535,"Ġbias":3536,"åĬ©äºİ":3537,"Ġconst":3538,"èĩªçͱ":3539,"æĿ¥è¯´":3540,"å°±æĺ¯":3541,"åį°":3542,"Ġmeet":3543,"è§ĦåĪĴ":3544,"çļĦç¾":3545,"èIJ¥åħ»":3546,"ators":3547,"稳å®ļ":3548,"ode":3549,"çħ®":3550,"Ġassoci":3551,"å¿Ĺ":3552,"è¡ĮæĺŁ":3553,"æĿİ":3554,"Ġreview":3555,"åĩĢ":3556,"ĠRo":3557,"Ġknowledge":3558,"以便":3559,"æµĭè¯ķ":3560,"åIJĪéĢĤ":3561,"sc":3562,"å½¢å¼ı":3563,"Ġfriends":3564,"Ġnature":3565,"Ġcritical":3566,"æ´ĭ":3567,"Ġafter":3568,"erve":3569,"Ġrece":3570,"çļĦæŃ":3571,"汽车":3572,"çķĮ":3573,"Ġloss":3574,"Ġapplications":3575,"å¤ļç§į":3576,"éĶħ":3577,"串":3578,"Ġinsp":3579,"---":3580,"ĠSh":3581,"Ġvol":3582,"lut":3583,"oks":3584,"sequ":3585,"Ġbir":3586,"åIJĪçIJĨ":3587,"Ġnecess":3588,"æĪijæĥ³":3589,"çŃīæĸ¹éĿ¢":3590,"é¼ĵ":3591,"Ġsoft":3
592,"Ġlive":3593,"å°ıæĺİ":3594,"ĠInd":3595,"Ġbring":3596,"æĺ¯æĮĩ":3597,"Ġsoil":3598,"ilar":3599,"举":3600,"æĿ¡ä»¶":3601,"Ġtri":3602,"亮":3603,"Ġmom":3604,"æı¡":3605,"ä¼°":3606,"ŀäºī":3607,"çĽij":3608,"èĤ¤":3609,"è´¢åĬ¡":3610,"æ·»åĬł":3611,"é¥®é£Ł":3612,"Ġallowing":3613,"åºķ":3614,"Ġright":3615,"Ġexpert":3616,"Ġsupp":3617,"Ġinit":3618,"çļĦæµ":3619,"arget":3620,"Ġexpect":3621,"Ġ19":3622,"Ġmeasures":3623,"olutions":3624,"just":3625,"arc":3626,"å°ļ":3627,"Ġpractice":3628,"æľīåĬ©äºİ":3629,"大éĩı":3630,"',":3631,"iment":3632,"Ġcontinue":3633,"Ġdiscuss":3634,"100":3635,"éļľ":3636,"çļĦæĦŁ":3637,"Ġreflect":3638,"itation":3639,"åį«":3640,"äºĨä¸Ģ":3641,"ney":3642,"ĠLe":3643,"ised":3644,"è¶ĭ":3645,"äºĨä¸Ģ个":3646,"Ġincreasing":3647,"çļĦæĮ":3648,"Ġstru":3649,"æĢ»ç»ĵ":3650,"ely":3651,"å®ĩ":3652,"Ġauthor":3653,"表éĿ¢":3654,"Ġx":3655,"æķħäºĭ":3656,"emic":3657,"Ġrepresent":3658,"ger":3659,"Ġincreased":3660,"ones":3661,"ains":3662,"Ġtrained":3663,"Ġfish":3664,"Ġstate":3665,"åĨ·":3666,"çĶŁéķ¿":3667,"Ġrenew":3668,"ording":3669,"åĮĹ":3670,"æİªæĸ½":3671,"平衡":3672,"Ġsuccessful":3673,"ä¸ĭéĿ¢":3674,"Ġactivity":3675,"èĮ¶":3676,"éĢĤåºĶ":3677,"èĦij":3678,"æİ¢ç´¢":3679,"ffic":3680,"ç»ĦæĪIJ":3681,"atives":3682,"äºļ":3683,"Ġscen":3684,"æ²Ļ":3685,"gress":3686,"使å¾Ĺ":3687,"æī¿":3688,"Ġdiscrim":3689,"Ġassistants":3690,"Ġexist":3691,"çķĻ":3692,"Ġspace":3693,"æľĢè¿ij":3694,"Ġideas":3695,"éĩĩåıĸ":3696,"light":3697,"注éĩį":3698,"çļĦæĹ¶éĹ´":3699,"è¿İ":3700,"Ġcomb":3701,"éĢĤå½ĵ":3702,"Ġyourself":3703,"rite":3704,"ason":3705,"åĮĢ":3706,"åı¯ä»¥ä½¿ç͍":3707,"åħħ满":3708,"Ġvalues":3709,"æ½":3710,"Ġbiases":3711,"ä¿ĥè¿Ľ":3712,"åľºæĻ¯":3713,"ross":3714,"åį³åı¯":3715,"Ġcru":3716,"Ġnumber":3717,"Ġtype":3718,"rast":3719,"åĩĨç¡®":3720,"This":3721,"Ġpast":3722,"çģ¯":3723,"å®ļä¹ī":3724,"Ġsolutions":3725,"Ġter":3726,"ä¿Ŀè¯ģ":3727,"èͬ":3728,"幸":3729,"åī§":3730,"åħ´è¶£":3731,"åª":3732,"ention":3733,"avor":3734,"Ġscient":3735,"åĬªåĬĽ":3736,"Ġproviders":3737,"Ġpolicies":3738,"alu":3739,"ĠIm":3740,"Ġallows":3741,"Ġintelligence":374
2,"çļĦæĸ¹æ³ķ":3743,"è¿Ļæĺ¯":3744,"Ġ`":3745,"Ġemissions":3746,"Ġå°Ĩ":3747,"Ġmeaning":3748,"Ġstyle":3749,"åİŁåĽł":3750,"Ġstrugg":3751,"çļĦç¾İ":3752,"iful":3753,"dition":3754,"éĥ½æľī":3755,"空æ°Ķ":3756,"å®ĥ们çļĦ":3757,"ä¼ĺåĮĸ":3758,"Ġinflu":3759,"åŁºäºİ":3760,"Ġdetails":3761,"Ġtransparency":3762,"Ġmess":3763,"ĠCl":3764,"Ġgame":3765,"pri":3766,"è¶ĭåĬ¿":3767,"å½Ĵ":3768,"ç¿»è¯ij":3769,"æķ£":3770,"By":3771,"éŃ":3772,"ĠAmeric":3773,"Ġproduction":3774,"Ġincorpor":3775,"æĻļ":3776,"Ġinvolve":3777,"Ġhot":3778,"æĻ®":3779,"by":3780,"Ġflow":3781,"Ġemerg":3782,"座":3783,"Ġidea":3784,"åİĭåĬĽ":3785,"éĿĴ":3786,"oms":3787,"èģĮä¸ļ":3788,"Ġreport":3789,"Ġpap":3790,"Ġtherap":3791,"Ġsal":3792,"åıĤä¸İ":3793,"æĸĩåѦ":3794,"æIJŃéħį":3795,"oot":3796,"),":3797,"Ġcr":3798,"Ġprocesses":3799,"gin":3800,"å¹³åı°":3801,"å¯Ł":3802,"Ġpromoting":3803,"æļĸ":3804,"akehold":3805,"ç»§":3806,"iver":3807,"æ¦Ĥ":3808,"Ġmodels":3809,"Ġdra":3810,"èĸ":3811,"Ġgroup":3812,"è¶³å¤Ł":3813,"Ġgreen":3814,"Ġhealthy":3815,"Ġcomfort":3816,"Ġadditional":3817,"ä¸Ģ次":3818,"é¤IJåİħ":3819,"Ġmaterials":3820,"Ġmanage":3821,"çļĦæ¯":3822,"伤":3823,"åıĬæĹ¶":3824,"Ġglo":3825,"Ġstat":3826,"å¿«éĢŁ":3827,"Ġmonitoring":3828,"aily":3829,"rand":3830,"oice":3831,"resh":3832,"ç»Ħç»ĩ":3833,"Ġunder":3834,"Ġnecessary":3835,"Ġhelpful":3836,"ĠCol":3837,"é»ijæ´ŀ":3838,"åģļåĩº":3839,"Ġcourse":3840,"Ġmat":3841,"Ġleg":3842,"Ġface":3843,"令":3844,"èī¯å¥½çļĦ":3845,"ock":3846,"åĮ»çĸĹ":3847,"çĽĸ":3848,"idence":3849,"Ġassociated":3850,"Ġprogress":3851,"åľĨ":3852,"Ġeveryone":3853,"ç¼ĵ":3854,"ĠEng":3855,"word":3856,"èĵĿ":3857,"天æ°Ķ":3858,"Ġactions":3859,"ems":3860,"ĠPl":3861,"å®Ļ":3862,"ush":3863,"顾":3864,"Ġcosts":3865,"ator":3866,"ç©¿":3867,"Ġamounts":3868,"èͬèıľ":3869,"..":3870,"Ġmanner":3871,"Ġconsequ":3872,"æ°ĶåĢĻ":3873,"Ġinsights":3874,"being":3875,"atory":3876,"ener":3877,"lex":3878,"Ġmeans":3879,"Ġcollaboration":3880,"Ġperspect":3881,"orm":3882,"priate":3883,"å°Ĭéĩį":3884,"Ġtarget":3885,"è®°å½ķ":3886,"åĢĴ":3887,"Ġrenewable":3888,"æĦ¿":3889,"èĥ½æºIJ":3890,"Ġ
input":3891,"å®ĩå®Ļ":3892,"ape":3893,"Ġadjust":3894,"eries":3895,"Ġdire":3896,"ä¾Ŀ":3897,"ustr":3898,"fect":3899,"Ġbeautiful":3900,"Ġdue":3901,"reci":3902,"çĮ®":3903,"èĥĮæĻ¯":3904,"èĤ¡":3905,"Ġdam":3906,"ik":3907,"Ġadvanced":3908,"çĽ¸å¯¹":3909,"åIJįç§°":3910,"Ġshort":3911,"Ġobject":3912,"è¿ĻéĩĮ":3913,"éĢłæĪIJ":3914,"èIJ¥éĶĢ":3915,"çļĦæĥħæĦŁ":3916,"票":3917,"Ġcountries":3918,"ining":3919,"istic":3920,"Ġplans":3921,"责任":3922,"Ġstakehold":3923,"the":3924,"Ġassess":3925,"æĢĿèĢĥ":3926,"ech":3927,"æĪIJåijĺ":3928,"21":3929,"Ġdaily":3930,"Ġcomput":3931,"çļĦæĥħåĨµ":3932,"æıIJåĩº":3933,"ĠâĢľ":3934,"åªĴ":3935,"ä¸Ńå¿ĥ":3936,"ished":3937,"ĠSe":3938,"onomous":3939,"ern":3940,"ç»´æĬ¤":3941,"ames":3942,"Ġprioritize":3943,"纸":3944,"èĤ¥":3945,"Ġtemper":3946,"æ¸ħæ´ģ":3947,"use":3948,"污":3949,"Ġminim":3950,"æĺ¯åľ¨":3951,"大å°ı":3952,"åĵªäºĽ":3953,"Ġappreci":3954,"reng":3955,"Ġregulations":3956,"ĠZ":3957,"éĶĻ误":3958,"rans":3959,"èĢĮä¸Ķ":3960,"èά":3961,"èij±":3962,"èĨ":3963,"æ°´å¹³":3964,"è´Ńçī©":3965,"åŃĹ符串":3966,"对æĸ¹":3967,"Ġhim":3968,"Ġconsequences":3969,"å·´":3970,"é¼ĵåĬ±":3971,"Ġfil":3972,"人åijĺ":3973,"è·Ŀ离":3974,"ĠWhen":3975,"çļĦæ°´":3976,"çī©çIJĨ":3977,"åIJĮæĹ¶ä¹Ł":3978,"åľ¨è¿Ļ个":3979,"åħ¶æ¬¡":3980,",\"":3981,"æ¶²":3982,"çĶ·":3983,"ival":3984,"åı¯ä»¥è®©":3985,"æĥ¯":3986,"Ġadvance":3987,"Ġveh":3988,"å¦ĤæŀľæĤ¨":3989,"Ġestab":3990,"ript":3991,"端":3992,"ä¸įä¼ļ":3993,"Ġtransparent":3994,"æķ°éĩı":3995,"çĽĺ":3996,"Ġspeak":3997,"Ġpark":3998,"Ġstakeholders":3999,"éº":4000,"Ġevent":4001,"çļĦæķ°æį®":4002,"èĩªåĬ¨":4003,"ç»ĨèĬĤ":4004,"è¯Ħä¼°":4005,"润":4006,"Ġpreferences":4007,"Ġveget":4008,"æįŁ":4009,"equ":4010,"Ġgl":4011,"Ġpain":4012,"ogra":4013,"Ġtraffic":4014,"Ġoce":4015,"ä¹ĺ":4016,"ext":4017,"âĢĿï¼Į":4018,"Ġanother":4019,"å¤ļå°ij":4020,"Ġagainst":4021,"ç»ıåİĨ":4022,"计ç®Ĺæľº":4023,"èĢIJ":4024,"软件":4025,"ĠPre":4026,"Ġplants":4027,"缸äºĴ":4028,"é¢ij":4029,"\\_":4030,"Ġsame":4031,"rug":4032,"Ġvalu":4033,"Ġocc":4034,"çļĦç¤":4035,"Ġsustainability":4036,"ĠShe":4037,"de":4038,"ote":4039,"Ġdig":4040,
"NA":4041,"Ġcrucial":4042,"æī§":4043,"å±Ģ":4044,"æĭŁ":4045,"æĭĮ":4046,"Ġnon":4047,"Ġengaging":4048,"Ġintern":4049,"LP":4050,"温度":4051,"æł¸":4052,"æĬ¥åijĬ":4053,"æĿ¥è¶Ĭ":4054,"hood":4055,"ä¸ī个":4056,"å¦Ĥä¸ĭ":4057,"çī©ä½ĵ":4058,"force":4059,"Ġneeded":4060,"Ġimages":4061,"Ġbuilding":4062,"icious":4063,"ĠæĪij":4064,"è¶ĬæĿ¥è¶Ĭ":4065,"æĶ¾åħ¥":4066,"go":4067,"éĻįä½İ":4068,"å½ĵåľ°":4069,"æ¶Īè´¹èĢħ":4070,"ç£":4071,"iversity":4072,"é¢Ħç®Ĺ":4073,"icle":4074,"æ··åIJĪ":4075,"Ġparticip":4076,"Ġdishes":4077,"Ġthroughout":4078,"Ġwithin":4079,"åı³":4080,"é«ĺçļĦ":4081,"Ġphot":4082,"Ġtrust":4083,"æĦıè¯Ĩ":4084,"以确ä¿Ŀ":4085,"çĬ¶æĢģ":4086,"Ġautomation":4087,"11":4088,"Ġpost":4089,"æīĭæľº":4090,"works":4091,"éĢı":4092,"åºĵ":4093,"Ġwind":4094,"Ġ==":4095,"Ġprocessing":4096,"èĮĥåĽ´":4097,"æĦıä¹ī":4098,"追æ±Ĥ":4099,"é":4100,"å¾Ħ":4101,"éĿł":4102,"ä¸ĸ":4103,"èϽ":4104,"ç«ŀäºī":4105,"Ġappropriate":4106,"æĽ´å¥½çļĦ":4107,"Ġcharacter":4108,"cl":4109,"ç§ĺ":4110,"itude":4111,"Ġteac":4112,"leep":4113,"ĠDevelop":4114,"ince":4115,"å·¦":4116,"ground":4117,"è¡Įä¸ļ":4118,"éĴĪ对":4119,"å¿ħè¦ģ":4120,"Ġdeterm":4121,"----------------":4122,"Ġstreng":4123,"do":4124,"Ġchallenging":4125,"ork":4126,"Ġanx":4127,"èī²çļĦ":4128,"Ġhard":4129,"æĺİç¡®":4130,"åĪĨ享":4131,"æĶ¹åıĺ":4132,"ä½³":4133,"åıªæľī":4134,"å±ķ示":4135,"Ġcamp":4136,"纳":4137,"aj":4138,"etic":4139,"ument":4140,"ä½łåı¯ä»¥":4141,"Ġpollut":4142,"Ġhig":4143,"pping":4144,"ead":4145,"çĦ¶èĢĮ":4146,"第äºĮ":4147,"鸣":4148,"çī©åĵģ":4149,"举":4150,"Ġencourage":4151,"pecial":4152,"Ġacross":4153,"elves":4154,"äºĭä»¶":4155,"cle":4156,"æ©":4157,"åªĴä½ĵ":4158,"ners":4159,"Ġcal":4160,"èϽçĦ¶":4161,"åĽº":4162,"ä¹łæĥ¯":4163,"Ġsafe":4164,"èĥ½éĩı":4165,"istics":4166,"ä¹ĭåīį":4167,"Ġissue":4168,"å¤ļ个":4169,"åĨ³çŃĸ":4170,"è¾¾åΰ":4171,"æĹ©":4172,"ä¸įåı¯":4173,"ä¸Ģ缴":4174,"å·¨":4175,"æĦŁè°¢":4176,"ĠNew":4177,"ä¸Ģ段":4178,"Ġmachines":4179,"å°Ĩåħ¶":4180,"ç»§ç»Ń":4181,"Ġword":4182,"çī¹åĪ«":4183,"Ġagriculture":4184,"æĢİ":4185,"éĢIJæ¸IJ":4186,"éĵ¾":4187,"课":4188,"Ġkind":4189,"å¢Ļ":4190,"谢谢":4
191,"Ġalgorithm":4192,"è£ħ饰":4193,"Ġalong":4194,"Ġeasy":4195,"äºij":4196,"è§£åĨ³æĸ¹æ¡Ī":4197,"Ġawareness":4198,"'ve":4199,"æĸ¹åIJij":4200,"Ġnever":4201,"Ġquickly":4202,"Ġrespect":4203,"çļĦæĻ":4204,"Ġamong":4205,"Ġaccountability":4206,"Ġlaw":4207,"ening":4208,"Ġdefin":4209,"Ġsurround":4210,"éĵģ":4211,"Ġpowerful":4212,"An":4213,"Ġcause":4214,"æ¥":4215,"æİĮæı¡":4216,"è¿ĺæĺ¯":4217,"Ġcreative":4218,"è¡Ģ":4219,"Ġlocated":4220,"unning":4221,"åľ°åĮº":4222,"éĿ¢ç§¯":4223,"鼨":4224,"Ġnear":4225,"Ġiniti":4226,"ression":4227,"ä¸ĭæĿ¥":4228,"25":4229,"é©¶":4230,"¾çĹħ":4231,"ables":4232,"æľīè¶£":4233,"循çݯ":4234,"çŃĶæ¡Ī":4235,"çł´":4236,"ication":4237,"éĻ¢":4238,"æ²»çĸĹ":4239,"Ġaddition":4240,"äºĭæĥħ":4241,"Ġbecause":4242,"åıĪ":4243,"èĤĮ":4244,"纪":4245,"side":4246,"æĭħ":4247,"湿":4248,"åįĬ":4249,"顺":4250,"ĠAnd":4251,"Ġrestaurant":4252,"Ġvide":4253,"Ġproblem":4254,"azing":4255,"Ġmembers":4256,"Ġnut":4257,"Ġcou":4258,"浪":4259,"Ġè¿Ļ":4260,"Ġhelping":4261,"ĠIs":4262,"æıIJåįĩ":4263,"ĠĠĠĠĠĠ":4264,"Ġsho":4265,"Ġrelev":4266,"Ġarg":4267,"Ġbalance":4268,"illed":4269,"æĺ¯ä»Ģä¹Ī":4270,"åĬĽéĩı":4271,"ired":4272,"å¤ľ":4273,"åı¯æĮģç»Ń":4274,"Ġperfect":4275,"**":4276,"ification":4277,"æ¶ī":4278,"Ġwildlife":4279,"ane":4280,"Ġrelated":4281,"室åĨħ":4282,"åºľ":4283,"享åıĹ":4284,"ours":4285,"è·ij":4286,"åķĨä¸ļ":4287,"aching":4288,"Ġsun":4289,"Ġrecognition":4290,"elt":4291,"Ġorder":4292,"å¹³åĿĩ":4293,"ging":4294,"临":4295,"çĤ¼":4296,"Ġgoing":4297,"åij¼åIJ¸":4298,"Ġsoftware":4299,"Ġremot":4300,"èijĹåIJį":4301,"幸ç¦ı":4302,"Ġenhance":4303,"èĻļ":4304,"Ġnow":4305,"Ġthreat":4306,"Ġdest":4307,"åĿĩåĮĢ":4308,"Ġacad":4309,"åºĶ对":4310,"çľĭåΰ":4311,"cast":4312,"è¾Ĩ":4313,"ificial":4314,"Ġvery":4315,"ook":4316,"åĮºåŁŁ":4317,"¹ģ":4318,"æĪ¿éĹ´":4319,"æıIJä¾ĽäºĨ":4320,"Ġmotiv":4321,"Ġaccessible":4322,"åĨ³å®ļ":4323,"Ġhy":4324,"å®Ī":4325,"Ġflo":4326,"ug":4327,"Ġinformed":4328,"åĵģè´¨":4329,"çļĦçŁ":4330,"aves":4331,"arr":4332,"ĠWith":4333,"let":4334,"è§ĤçĤ¹":4335,"enge":4336,"è¡ĮåĬ¨":4337,"friend":4338,"ç³ķ":4339,"Ġfurther":434
0,"ĠEns":4341,"ç§ģ":4342,"Ġado":4343,"Ġclean":4344,"缸åºĶ":4345,"Ġfre":4346,"pecially":4347,"èĹ":4348,"Ġcapt":4349,"çļĦçľ":4350,"Ġsomeone":4351,"Ġcell":4352,"æĶ¾åľ¨":4353,"欢è¿İ":4354,"ĠâĢ":4355,"Ġdevices":4356,"çļĦæĸ¹å¼ı":4357,"Ġjobs":4358,"augh":4359,"not":4360,"æľīäºĽ":4361,"åħ¬åħ±":4362,"gest":4363,"çļĦçĶŁæ´»":4364,"çľ¼":4365,"çļĦä¿¡æģ¯":4366,"ĠCons":4367,"æİĴåºı":4368,"Ġbenefit":4369,"rect":4370,"å¤ı":4371,"unte":4372,"符åIJĪ":4373,"ä¸Ģä½į":4374,"åĨħéĥ¨":4375,"Ġlooking":4376,"ding":4377,"æĬĺ":4378,"è¾ij":4379,"è¿Ļ个éĹ®é¢ĺ":4380,"Ġespecially":4381,"çľł":4382,"âĢĿãĢĤ":4383,"å¥ı":4384,"ray":4385,"è¿ĺåı¯ä»¥":4386,"åĪĽä½ľ":4387,"coming":4388,"Ġmultiple":4389,"éļIJ":4390,"泡":4391,"æłĩåĩĨ":4392,"Ġmil":4393,"éľĢè¦ģ注æĦı":4394,"Ġanxiety":4395,"æĶ¹è¿Ľ":4396,"å±ĭ":4397,"污æŁĵ":4398,"ç¼ĸç¨ĭ":4399,"è´¹ç͍":4400,"Ġevalu":4401,"imately":4402,"Ġliter":4403,"ograph":4404,"Ġsearch":4405,"16":4406,"enced":4407,"Ġmethods":4408,"çĥĪ":4409,"模å¼ı":4410,"çĬ¶åĨµ":4411,"æĶ¹åĸĦ":4412,"å¤ļæł·":4413,"cer":4414,"å¥ĸ":4415,"Ġsatis":4416,"Ġwebsite":4417,"åĬŀ":4418,"åģ¥èº«":4419,"Ġglobal":4420,"Ġask":4421,"Ġplatforms":4422,"Ġdiseases":4423,"çݰ象":4424,"tics":4425,"æ±ģ":4426,"åΤæĸŃ":4427,"Ġconvers":4428,"Ġrelationship":4429,"设置":4430,"æ³ķå¾ĭ":4431,"Ġmindful":4432,"é¢Ħæµĭ":4433,"overy":4434,"åģľ":4435,"ç͵è§Ĩ":4436,"è§ĦåĪĻ":4437,"aken":4438,"Ġimplementing":4439,"ising":4440,"åıĤåĬł":4441,"æĥħ绪":4442,"Ġprovided":4443,"æ·±åħ¥":4444,"Ġprogrammed":4445,"Ġrelevant":4446,"çļĦçĥ":4447,"çĸ¾çĹħ":4448,"åĮ»çĶŁ":4449,"åĪĽå»º":4450,"Ġgenerate":4451,"æĶ¶åħ¥":4452,"ä¼ij":4453,"izes":4454,"Ġtransform":4455,"éģµ":4456,"astic":4457,"åijĪ":4458,"æ¯ı个人":4459,"è¿Ķ":4460,"iet":4461,"Ġvoice":4462,"éĢĶ":4463,"æĶ¾æĿ¾":4464,"åį´":4465,"èĥľ":4466,"Ġstructure":4467,"æĹ¶å°ļ":4468,"ĠQ":4469,"Ġelse":4470,"duc":4471,"Ġemp":4472,"èģļ":4473,"è´§":4474,"aches":4475,"ç§Ģ":4476,"anks":4477,"Ġnight":4478,"Ġprofessionals":4479,"Ġbas":4480,"è´µ":4481,"ec":4482,"Ġdiversity":4483,"ites":4484,"dr":4485,"åĽ°éļ¾":4486,"ĥåľ":4487,"åŀĥåľ":4488,"åŀĥåľ
¾":4489,"Ġdrug":4490,"碳":4491,"Ġname":4492,"åĮĸçļĦ":4493,"aid":4494,"æľĢ大":4495,"æijĦ":4496,"ç®ĢåįķçļĦ":4497,"Ġwarm":4498,"Ġdone":4499,"Ġfunction":4500,"asc":4501,"强è°ĥ":4502,"Ġdemand":4503,"Ġvisual":4504,"Ġupd":4505,"æŃ£åľ¨":4506,"Ġsimilar":4507,"éĢĴ":4508,"æ¯Ľ":4509,"éĶ»":4510,"ently":4511,"Ġvaluable":4512,"Ġdisaster":4513,"ä¸Ģèά":4514,"æ´²":4515,"ĠReg":4516,"Ġdiscrimination":4517,"åĨĻä¸Ģç¯ĩ":4518,"Ġgovernment":4519,"Ġ好çļĦ":4520,"500":4521,"lying":4522,"Ġprev":4523,"Ġprepare":4524,"Ġproblems":4525,"è·³":4526,"Ġprom":4527,"åĨ²":4528,"å®īè£ħ":4529,"éĶ»çĤ¼":4530,"æµĵ":4531,"è¹":4532,"åºĶç͍ç¨ĭåºı":4533,"ng":4534,"Ġcompet":4535,"åĪĨåĪ«":4536,"ological":4537,"审":4538,"Ġtransl":4539,"Ġdirect":4540,"åīĤ":4541,"Ġsuggestions":4542,"Ġpaper":4543,"Ġrecognize":4544,"ton":4545,"Ġmitigate":4546,"讨论":4547,"äºĴåĬ¨":4548,"ĠEar":4549,"Ġamazing":4550,"cre":4551,"é¦Ī":4552,"Ġinvolved":4553,"face":4554,"æľīåħ³":4555,"))":4556,"Ġexce":4557,"Ġproductivity":4558,"èŃ":4559,"é¦Ĩ":4560,"Ġsounds":4561,"Ġidentifying":4562,"],":4563,"é¾Ļ":4564,"Ġfit":4565,"Ġcontribute":4566,"ths":4567,"friendly":4568,"ele":4569,"ified":4570,"iveness":4571,"itely":4572,"ĠX":4573,"Ġled":4574,"åĿı":4575,"Ġhistor":4576,"Ġdat":4577,"Ġjourney":4578,"Ġ}":4579,"Ġselect":4580,"漫":4581,"Ġconduct":4582,"è¿Ľä¸ĢæŃ¥":4583,"ç»ĻæĪij":4584,"Ġlif":4585,"è£ħä¿®":4586,"为ä»Ģä¹Ī":4587,"京":4588,"Ġnav":4589,"Ġwhole":4590,"ç¹ģ":4591,"åĨľ":4592,"æĶ»":4593,"Ġbreat":4594,"Ġmiss":4595,"é¾Ħ":4596,"tt":4597,"sw":4598,"Ġbar":4599,"请éĹ®":4600,"èģĶç½ij":4601,"Ġattract":4602,"æĤ¨åı¯ä»¥":4603,"One":4604,"åħħåĪĨ":4605,"ring":4606,"Ġå½ĵçĦ¶":4607,"ream":4608,"Ġevol":4609,"Ġsn":4610,"ĠEm":4611,"mosp":4612,"Ġchoose":4613,"view":4614,"Ġarr":4615,"Ġsleep":4616,"ended":4617,"æŀ¶":4618,"Ġvehicles":4619,"Ġfresh":4620,"Ġorganization":4621,"è¿Ļ段":4622,"汤":4623,"ĠInt":4624,"Ġcontext":4625,"åı¦å¤ĸ":4626,"Ġocean":4627,"æĦŁåıĹ":4628,"Ġpollution":4629,"urb":4630,"æī§è¡Į":4631,"ersonal":4632,"ĠHealth":4633,"ä¼ĺçĤ¹":4634,"Ġattention":4635,"æľīçĿĢ":4636,"é£ŁæĿIJ":46
37,"Ġerr":4638,"çļĦæĿ¥":4639,"çļĦçĪ":4640,"èѦ":4641,"è·Ł":4642,"æĹħè¡Į":4643,"èĴľ":4644,"çļĦæĢĿ":4645,"Ġchatbot":4646,"çļĦéľĢæ±Ĥ":4647,"çķ¥":4648,"Ġfeeling":4649,"Ġimplemented":4650,"社åĮº":4651,"çļĦ建议":4652,"æIJħ":4653,"éĹ»":4654,"åıįé¦Ī":4655,"缴æİ¥":4656,"æĺ¥":4657,"itable":4658,"æĪijä¼ļ":4659,"åį±":4660,"èī¯å¥½":4661,"Ġliving":4662,"åıĺéĩı":4663,"ĠBut":4664,"Ġcomplete":4665,"Ġtrends":4666,"Ġmakes":4667,"ä»Ĭ天":4668,"Ġdistribut":4669,"Ġcommit":4670,"Ġatmosp":4671,"ä¼´":4672,"Ġsensors":4673,"Ġsw":4674,"æĹłè®º":4675,"omen":4676,"æĶ¿åºľ":4677,"Ġchallenge":4678,"Ġturn":4679,"çIJĨ论":4680,"par":4681,"Ġwrite":4682,"ç»ıåħ¸":4683,"emember":4684,"é¥Ń":4685,"æĸ¹ä¾¿":4686,"Ġcu":4687,"Ġvalue":4688,"Ġfund":4689,"pose":4690,"è°ĥæŁ¥":4691,"çĿ¡":4692,"Ġcommunicate":4693,"Ġdisease":4694,"Ġresearc":4695,"Ġlack":4696,"arning":4697,"ĠPark":4698,"çĦ¦":4699,"é«ĺ度":4700,"Ġrather":4701,"宣":4702,"çζ":4703,"éĺ¶":4704,"订":4705,"çĥ§":4706,"Ġhigher":4707,"Ġsummary":4708,"ĠAut":4709,"çļĦæ³":4710,"Ġele":4711,"isms":4712,"Ġreli":4713,"ä¹Łä¼ļ":4714,"fra":4715,"åijĬè¯īæĪij":4716,"æĬ½":4717,"Ġsituations":4718,"Ġmarine":4719,"æĥ³è¦ģ":4720,"inci":4721,"inal":4722,"Ġgain":4723,"Ġdifference":4724,"æľºåĻ¨äºº":4725,"æµģç¨ĭ":4726,"ĠChat":4727,"ç½ijç«Ļ":4728,"æľ«":4729,"Ġcolor":4730,"Ġaspect":4731,"ç½Ĺ":4732,"ĠEduc":4733,"Ġdeploy":4734,"Ġbeauty":4735,"æĤ£":4736,"ruction":4737,"itut":4738,"æĿŁ":4739,"让æĪij们":4740,"éķ¿åº¦":4741,"ules":4742,"æ¶īåıĬ":4743,"Ġdigital":4744,"Ġexisting":4745,"ĠOr":4746,"\\_\\_":4747,"Ġbackground":4748,"çĹĩ":4749,"æ¯ı天":4750,"python":4751,"Ġfarmers":4752,"Ġcontinu":4753,"\":":4754,"Ġgiven":4755,"å°ıæĹ¶":4756,"Ġmoment":4757,"200":4758,"John":4759,"éĿ¢å¯¹":4760,"Ġintro":4761,"Ġtherapy":4762,"è¿ĶåĽŀ":4763,"å¹¶åľ¨":4764,"Ġz":4765,"Ġafford":4766,"ä¸Ŀ":4767,"宽":4768,"ĠÃ":4769,"ĠNational":4770,"èĥ¡":4771,"Ġexercise":4772,"æIJħæĭĮ":4773,"æĶ¯ä»ĺ":4774,"éĺ³åħī":4775,"è¯ļ":4776,"Ġsect":4777,"ĠSu":4778,"å¢ŀéķ¿":4779,"ç¾İ丽":4780,"Ġwa":4781,"以ä¸ĭæĺ¯ä¸ĢäºĽ":4782,"èĽĭç³ķ":4783,"Ġill":4784,"æ¸ħæĻ":478
5,"etry":4786,"梦":4787,"ç¾İåĽ½":4788,"ä»į":4789,"oney":4790,"Ġecosystems":4791,"æĮĩ导":4792,"def":4793,"99":4794,"æŁĶ":4795,"pped":4796,"Ġlimit":4797,"çİī":4798,"Ġacademic":4799,"Ġrestaurants":4800,"Ġhead":4801,"ä¿¡ä»»":4802,"asters":4803,"å²ģ":4804,"akers":4805,"14":4806,"As":4807,"æł¡":4808,"é«ĺæķĪ":4809,"phas":4810,"yn":4811,"ç¨ĭ度":4812,"è¾£":4813,"ä¸ĬéĿ¢":4814,"å®¶å±ħ":4815,"term":4816,"ç¾İé£Ł":4817,"Ġovers":4818,"å®ĺ":4819,"Ġindic":4820,"ĠYour":4821,"St":4822,"形象":4823,"è´¡":4824,"åºĬ":4825,"ĠSc":4826,"agra":4827,"羣æŃ£":4828,"oint":4829,"ids":4830,"arent":4831,"éĵ¶":4832,"èģĬ":4833,"Ġregular":4834,"ä¼ĺç§Ģ":4835,"Ġcolle":4836,"çĸij":4837,"Ġsubject":4838,"Ġgreater":4839,"Ġstore":4840,"åŁ¹è®Ń":4841,"Ġimag":4842,"Ġansw":4843,"ä½Ļ":4844,"Ġspot":4845,"åĪĨåŃIJ":4846,"Ġaudience":4847,"pet":4848,"Ġvers":4849,"Ġtrail":4850,"åĭĩ":4851,"erous":4852,"Ġguidance":4853,"Ġspeech":4854,"åĵ²":4855,"æĺ¯çͱ":4856,"è´¡çĮ®":4857,"åIJĪéĢĤçļĦ":4858,"设æĸ½":4859,"ä»ĸ人":4860,"ensive":4861,"å̾":4862,"aling":4863,"Ġprojects":4864,"å³":4865,"Ġtakes":4866,"绩":4867,"That":4868,"Ġbro":4869,"ived":4870,"Ġ&":4871,"åĿIJ":4872,"placement":4873,"è¿ŀæİ¥":4874,"çļĦ社":4875,"ĠTra":4876,"Ġrelax":4877,"ufact":4878,"éģį":4879,"Ġsurv":4880,"åı£åij³":4881,"Ġcreativity":4882,"of":4883,"å¨ģ":4884,"çļĦçł":4885,"Ġbreath":4886,"Ġplaces":4887,"Ġdescrib":4888,"èĭ±è¯Ń":4889,"Ġdamage":4890,"oration":4891,"为æĤ¨":4892,"ift":4893,"Ġcase":4894,"å¹´é¾Ħ":4895,"Ġpress":4896,"çĶľ":4897,"éĩİ":4898,"æĹħ游":4899,"Ġtaken":4900,"ined":4901,"Ġconcept":4902,"æĴŃ":4903,"Ġinteresting":4904,"è·µ":4905,"Ġsea":4906,"60":4907,"Ġfoot":4908,"ĠName":4909,"Ġresearchers":4910,"éĢģ":4911,"Ġwee":4912,");":4913,"çļĦåħ³éĶ®":4914,"ä¼½":4915,"elebr":4916,"å¡ij":4917,"We":4918,"ç»ı常":4919,"Ġpopulations":4920,"åħ¬å¼ı":4921,"orn":4922,"çĩĥ":4923,"人çĶŁ":4924,"17":4925,"æİ¥åıĹ":4926,"Ġlocation":4927,"Ġinequ":4928,"Ġintervent":4929,"Ġinterested":4930,"Ġdefinitely":4931,"Ġassistance":4932,"è¿Ļä¸Ģ":4933,"åIJĪåIJĮ":4934,"ä¼ĺåĬ¿":4935,"çļĦå·¥ä½ľ":4936,"Ġ12":493
7,"Ġmov":4938,"åģı":4939,"åŃĺåĤ¨":4940,"usive":4941,"æĹı":4942,"ï¼īï¼Į":4943,"Ġgas":4944,"Ġinterests":4945,"æ¸ħæĻ°":4946,"Ġgard":4947,"çĸ«":4948,"Ġsay":4949,"夫":4950,"ges":4951,"èIJ¨":4952,"ä¸ļåĬ¡":4953,"个æĢ§":4954,"åIJ¯":4955,"Ġengagement":4956,"Ġbig":4957,"éľĢè¦ģèĢĥèĻij":4958,"Ġprinci":4959,"åij¨åĽ´":4960,"Ġopportunity":4961,"çģ¾":4962,"èĹı":4963,"rel":4964,"缺çĤ¹":4965,"Ġhappy":4966,"åĴĮåħ¶ä»ĸ":4967,"ava":4968,"Ġestablish":4969,"鸡èĽĭ":4970,"iking":4971,"ĠTrans":4972,"rastructure":4973,"forest":4974,"èİ·åıĸ":4975,"èĦļ":4976,"inally":4977,"èµı":4978,"Ġdelicious":4979,"Ġresults":4980,"è§Ĥå¯Ł":4981,"å®ŀè·µ":4982,"Ġlast":4983,"Ġpolit":4984,"æĢ§èĥ½":4985,"For":4986,"bi":4987,"çĽ¸ä¿¡":4988,"ffee":4989,"Ġphr":4990,"Ġforest":4991,"elling":4992,"æµģè¡Į":4993,"atic":4994,"大家":4995,"ĠInst":4996,"æķ°åѦ":4997,"æī©":4998,"å®Įåħ¨":4999,"å¼ķèµ·":5000,"ese":5001,"转æį¢":5002,"Ġaffected":5003,"Ġrobotics":5004,"综ä¸Ĭ":5005,"Ġprop":5006,"让人":5007,"æ²³":5008,"ä¸ŃæľĢ":5009,"Ġautonomous":5010,"Ġhaving":5011,"Ġtrip":5012,"ury":5013,"Ġbiased":5014,"Ġconsiderations":5015,"Ġparticular":5016,"åįł":5017,"æİ¨å¹¿":5018,"Ġinitiatives":5019,"ials":5020,"åij³éģĵ":5021,"Ġtreatments":5022,"Ġemphas":5023,"çĭ¬çī¹çļĦ":5024,"Ġlay":5025,"æĶ¿çŃĸ":5026,"æĢİä¹Ī":5027,"ronic":5028,"play":5029,"Ġcook":5030,"è¿Ľåħ¥":5031,"è½®":5032,"Ġvolunte":5033,"Ġrain":5034,"ĠMon":5035,"Ġconsumption":5036,"èĽĭçϽ":5037,"ĠSoc":5038,"壤":5039,"Ġroutine":5040,"Ġimproved":5041,"To":5042,"人çī©":5043,"读èĢħ":5044,"Ġgoal":5045,"广åijĬ":5046,"éķ¿æľŁ":5047,"Ġey":5048,"He":5049,"Ġoutdo":5050,"Ġcuis":5051,"Ġaway":5052,"Ġbooks":5053,"Ġtopic":5054,"大åĪ©":5055,"house":5056,"Ġones":5057,"ç§Ł":5058,"':":5059,"æĪ¿å±ĭ":5060,"ç§»åĬ¨":5061,"Ġdisasters":5062,"ests":5063,"illing":5064,"绿èī²":5065,"åĵ²åѦ":5066,"æĪIJåĪĨ":5067,"Ġoccur":5068,"ľä¼½":5069,"åľŁå£¤":5070,"çļĦ主è¦ģ":5071,"çݰå®ŀ":5072,"Ġanimal":5073,"é¢Ĩ导":5074,"Ġviews":5075,"éĤ®":5076,"æ°§åĮĸ":5077,"athy":5078,"éģĵå¾·":5079,"社交åªĴä½ĵ":5080,"ĠPersonal":5081,"ĽåĽ´":5082,"Ġpurch":5083,"Ġcountry":50
84,"Ġremind":5085,"寸":5086,"Ġrights":5087,"çļĦçݯå¢ĥ":5088,"ĠPr":5089,"Ġline":5090,"ibr":5091,"驾":5092,"Ġmaj":5093,"Ġovercome":5094,"Ġnext":5095,"æīĢè¿°":5096,"è§Ħå®ļ":5097,"Ġinteractions":5098,"Ġconflic":5099,"Ġwhy":5100,"ç³»åĪĹ":5101,"å°¼":5102,"ibly":5103,"çīĽå¥¶":5104,"Ġresponses":5105,"ses":5106,"åѦä¼ļ":5107,"bol":5108,"Ġstandards":5109,"ulner":5110,"对è¯ĿåĨħ容":5111,"lished":5112,"çļĦæĢ§":5113,"çĶŁæĢģç³»ç»Ł":5114,"ann":5115,"æĥħåĨµä¸ĭ":5116,"寻æ±Ĥ":5117,"Ġhold":5118,"den":5119,"åįĥ":5120,"Ġmention":5121,"ĠMany":5122,"缴åΰ":5123,"éģĹ":5124,"hel":5125,"Ġbelieve":5126,"aries":5127,"æľīä¸Ģ个":5128,"13":5129,"Ġatmosphere":5130,"Ġmor":5131,"æĹ¥æľŁ":5132,"ä¹ħ":5133,"ä½łå¥½":5134,"Ġaddressing":5135,"ĠâĢĵ":5136,"çļĦåľ°æĸ¹":5137,"ming":5138,"Ġcannot":5139,"Ġmanufact":5140,"Ġpie":5141,"icing":5142,"Ġstudies":5143,"ç¾İåij³":5144,"ĠAmerican":5145,"ĠNLP":5146,"Ġaccording":5147,"mselves":5148,"èĦĤ":5149,"èĩªä¿¡":5150,"æīĢéľĢ":5151,"Ġthemselves":5152,"Ġremote":5153,"åŁ¹åħ»":5154,"å®īæİĴ":5155,"ä½łéľĢè¦ģ":5156,"Ġregard":5157,"iring":5158,"è¯ĨåĪ«":5159,"Ġarticle":5160,"æģĴ":5161,"æĢ»çļĦæĿ¥":5162,"Ġalign":5163,"æ±ł":5164,"tenance":5165,"faction":5166,"åĬ¨ä½ľ":5167,"çļĦç©":5168,"缩":5169,"æĢ¥":5170,"Ġ100":5171,"Ġtesting":5172,"åŃĹæ¯į":5173,"å¹´è½»":5174,"åζéĢł":5175,"Ġswe":5176,"å°º":5177,"hens":5178,"æ°´æŀľ":5179,"Ġinfrastructure":5180,"èī²å½©":5181,"æĢ»çļĦæĿ¥è¯´":5182,"æľīä»Ģä¹Ī":5183,"text":5184,"车è¾Ĩ":5185,"Ġpay":5186,"rop":5187,"ĊĠĠ":5188,"Ġcaused":5189,"Ġcorrect":5190,"Ġì":5191,"èĥŀ":5192,"ĠMed":5193,"ç²¾ç¥ŀ":5194,"æ°ĶåĢĻåıĺåĮĸ":5195,"ĠRed":5196,"äºĴèģĶç½ij":5197,"Ġengage":5198,"åĪĨ为":5199,"ĠData":5200,"Ġfull":5201,"enc":5202,"éĩįæĸ°":5203,"æŃ£ç¡®çļĦ":5204,"çļĦæ°Ķ":5205,"åıĮæĸ¹":5206,"Ġcomes":5207,"åı¤ä»£":5208,"æŁIJäºĽ":5209,"åijĪçݰ":5210,"Ġtoday":5211,"aged":5212,"æĪijåı¯ä»¥":5213,"æĹ¥å¸¸":5214,"æ»ij":5215,"Ġclin":5216,"Ġ\\":5217,"Ġobs":5218,"Ġartificial":5219,"Ġexcell":5220,"çļĦç¬":5221,"alls":5222,"Ġproduce":5223,"ĠDes":5224,"oss":5225,"è¹Ī":5226,"Ġdraw":5227,"Ġletter":5228,"Ġ
advice":5229,"Ġhighly":5230,"çĬ¯":5231,"综ä¸ĬæīĢè¿°":5232,"满æĦı":5233,"Ġprinciples":5234,"èĮĦ":5235,"Ġfeelings":5236,"çļĦæ´":5237,"Ġhom":5238,"Ġfail":5239,"Ġcrop":5240,"å§ľ":5241,"Ġquestion":5242,"Ġdisabilities":5243,"èĪŀè¹Ī":5244,"Ġimplications":5245,"ral":5246,"Ġsing":5247,"40":5248,"Ġfamil":5249,"Ġgovernments":5250,"Ġrecord":5251,"å½¢çĬ¶":5252,"Ġbegin":5253,"ises":5254,"çļĦæĥ³":5255,"achine":5256,"è°±":5257,"Ġvulner":5258,"Ġproper":5259,"Ġoversight":5260,"è´ŁéĿ¢":5261,"Ġemail":5262,"Ġnews":5263,"Ġexploring":5264,"Ġfavor":5265,"楼":5266,"å®ľ":5267,"Ġunivers":5268,"å·®å¼Ĥ":5269,"ï¼īãĢĤ":5270,"è§£åĨ³éĹ®é¢ĺ":5271,"Ġfamous":5272,"gn":5273,"Ġmessage":5274,"atitude":5275,"Ġcra":5276,"Ġcover":5277,"æ·±åĪ»":5278,"åı¯ä»¥éĢīæĭ©":5279,"çĶŁæ´»ä¸Ń":5280,"ç§įç±»":5281,"Ġsmart":5282,"onstr":5283,"vey":5284,"çͲ":5285,"Ġregularly":5286,"ĠSm":5287,"æĦŁè§ī":5288,"Ġthought":5289,"Ġexh":5290,"cure":5291,"ç»ĺ":5292,"认è¯Ĩ":5293,"Ġold":5294,"æĦī":5295,"称为":5296,"Ġfields":5297,"Ġconsist":5298,"ãģ":5299,"ç»Ĩèĥŀ":5300,"Ġhours":5301,"80":5302,"alking":5303,"è§īå¾Ĺ":5304,"ç»Ŀ":5305,"ä½łä»¬":5306,"ĠEnglish":5307,"Ġsignificantly":5308,"Ġsource":5309,"Ġant":5310,"Ġeducational":5311,"Ġtask":5312,"Ġhandle":5313,"æIJľ":5314,"ĠSp":5315,"Ġcalled":5316,"Ġterms":5317,"æ²ī":5318,"Ġwin":5319,"duction":5320,"Ġmodern":5321,"Ġcuisine":5322,"å¥Ĺ":5323,"触":5324,"olutely":5325,"ç«¥":5326,"pite":5327,"Ġfelt":5328,"Ġcompre":5329,"Ġwond":5330,"è¿IJè¡Į":5331,"Ġresil":5332,"çĽ¸ä¼¼":5333,"éĩijèŀį":5334,"çαæĥħ":5335,"ç¬Ķ":5336,"èĪª":5337,"è°Ī":5338,"åĬĽçļĦ":5339,"æľīæīĢ":5340,"æ½ľ":5341,"ulate":5342,"Ġdetection":5343,"å®£ä¼ł":5344,"Ġmatter":5345,"éĩıåŃIJ":5346,"Write":5347,"ç»ĵåIJĪ":5348,"ç»ıè¿ĩ":5349,"Ġdevelopers":5350,"èª":5351,"Ġ---":5352,"人éĻħ":5353,"çѾ":5354,"ï¼ļâĢľ":5355,"Ġinnovative":5356,"ãĢĤâĢĿ":5357,"å½¼":5358,"饼":5359,"è¿ĩ度":5360,"Ġplanet":5361,"åħ°":5362,"å¸ģ":5363,"æķ¬":5364,"Ġlegal":5365,"Ġlot":5366,"æĪIJ为äºĨ":5367,"iate":5368,"Ġmis":5369,"åģĩ设":5370,"çļĦæĸĩ竳":5371,"ĠCompan":5372,"Ġdoc":5373,"Ġcareful":53
74,"Ġever":5375,"æĪij们å°Ĩ":5376,"ä¾ĭåŃIJ":5377,"ä¹³":5378,"ä½ľèĢħ":5379,"åIJ§":5380,"æļ´":5381,"Ġremember":5382,"缮çļĦ":5383,"Ġput":5384,"常è§ģçļĦ":5385,"Ġfest":5386,"建设":5387,"å®ŀç͍":5388,"Ġactive":5389,"çªĹ":5390,"outh":5391,"åİŁçIJĨ":5392,"Ġtrying":5393,"è¿·":5394,"缸åIJĮ":5395,"éħĴåºĹ":5396,"Another":5397,"æľĢä½³":5398,"Ġanalytics":5399,"Ġperpet":5400,"ipment":5401,"Ġå¦Ĥæŀľ":5402,"è§Ĥä¼Ĺ":5403,"Ġcelebr":5404,"Ġheav":5405,"Ġmeditation":5406,"大æ°Ķ":5407,"And":5408,"ä¸įéĶĻ":5409,"Ġwhether":5410,"set":5411,"Ġdemonstr":5412,"ä¸Ģ款":5413,"æĶ¶éĽĨ":5414,"éĻIJåζ":5415,"Ġing":5416,"Ġrevolution":5417,"çľģ":5418,"Ġscience":5419,"缮åīį":5420,"Ġthinking":5421,"±ä¹IJ":5422,"课ç¨ĭ":5423,"Ġpack":5424,"Ġimage":5425,"loc":5426,"Ġstories":5427,"uck":5428,"Ġsatisfaction":5429,"Ġcollection":5430,"ho":5431,"èµŀ":5432,"éĿ¢ä¸´":5433,"Ġla":5434,"Ġsymbol":5435,"Ġemb":5436,"Ġhabitats":5437,"Ġlower":5438,"Ġcontinues":5439,"éľĩ":5440,"åĵĪ":5441,"ĠTake":5442,"Ġenvironments":5443,"Ġthree":5444,"Ġenc":5445,"ĠAcc":5446,"æĦıåij³":5447,"åݨ":5448,"chan":5449,"ĠHum":5450,"Ġtrue":5451,"åĪĩæĪIJ":5452,"sing":5453,"âĢĶâĢĶ":5454,"åĩºæĿ¥":5455,"Ġregion":5456,"Ġinterpre":5457,"Ġdiagnosis":5458,"éŀ":5459,"Ġdoing":5460,"Ġrun":5461,"Ġcoffee":5462,"Ġmajor":5463,"Ġmindfulness":5464,"Ġaffordable":5465,"çϾ":5466,"Ġdetailed":5467,"éĿŀ常éĩįè¦ģçļĦ":5468,"çļĦæ²ŁéĢļ":5469,"çļĦæķħ":5470,"åĢĴåħ¥":5471,"Ġthemes":5472,"Ġnetwork":5473,"ï¼īï¼ļ":5474,"ĠUnited":5475,"çļĦæĮĩ":5476,"orts":5477,"åį«çĶŁ":5478,"Ġplanning":5479,"æĥł":5480,"åīª":5481,"ĠProv":5482,"çļĦåºĶç͍":5483,"Ġperi":5484,"Ġaccountable":5485,"çīĻ":5486,"çļĦçģ":5487,"Ġchoice":5488,"ĠComm":5489,"idents":5490,"çļĦå®īåħ¨":5491,"å¹¶ä¸į":5492,"太éĺ³ç³»":5493,"Ġreceive":5494,"Ġclose":5495,"çļĦæĹ¶åĢĻ":5496,"Ġchanging":5497,"ä»·å̼è§Ĥ":5498,"Ġperpetu":5499,"Ġseason":5500,"Ġmen":5501,"Ġlearned":5502,"Ġsituation":5503,"Ġreplace":5504,"head":5505,"让æĪij":5506,"åľ¨ä¸Ģèµ·":5507,"çļĦ空":5508,"éľ²":5509,"Ġenough":5510,"å±ķçݰ":5511,"Ġleaders":5512,"ancing":5513,"Ġtemperature":5514,"åı«":55
15,"Ġ30":5516,"æĦıåij³çĿĢ":5517,"æ±ĩ":5518,"ĠGovern":5519,"Ġfocused":5520,"uro":5521,"Ġsimple":5522,"Ġhiking":5523,"æ¯Ĵ":5524,"Ġcomprehens":5525,"äºĪ":5526,"Ġcreated":5527,"cond":5528,"页":5529,"ĠWor":5530,"è¯ģæį®":5531,"Ġworkplace":5532,"Ġcharacters":5533,"çļĦ设计":5534,"Ġmechan":5535,"ĠDis":5536,"ç¥ŀç§ĺ":5537,"å·ŀ":5538,"ĠOn":5539,"= seq_len, f"位置编码长度 {pos_emb.shape[0]} 小于序列长度 {seq_len}" + assert pos_emb.shape[1] == head_dim, f"位置编码维度 {pos_emb.shape[1]} 与头维度 {head_dim} 不匹配" + + # 截取需要的位置编码长度 + pos_emb = pos_emb[:seq_len] + + # 将pos_emb调整为广播形状 [1, seq_len, 1, head_dim] + pos_emb = pos_emb.unsqueeze(0).unsqueeze(2) + + # 将head_dim分成两半 + half_head_dim = head_dim // 2 + + # 提取cos和sin值(偶数索引是cos,奇数索引是sin) + cos = pos_emb[..., 0::2] + sin = pos_emb[..., 1::2] + + # 将xq和xk重新排列,以便进行旋转操作 + # 原始复数版本中,xq和xk被重塑为复数张量,其中实部和虚部交错排列 + # 在实数版本中,我们需要将偶数索引和奇数索引分开处理 + + # 分离偶数和奇数索引 + xq_even = xq[..., 0::2] # 偶数索引,对应复数的实部 + xq_odd = xq[..., 1::2] # 奇数索引,对应复数的虚部 + xk_even = xk[..., 0::2] + xk_odd = xk[..., 1::2] + + # 应用旋转(等价于复数乘法) + # (a + bi)(cos + sin*i) = (a*cos - b*sin) + (a*sin + b*cos)i + # 其中a是偶数索引,b是奇数索引 + xq_out_even = xq_even * cos - xq_odd * sin # 新的偶数索引(实部) + xq_out_odd = xq_even * sin + xq_odd * cos # 新的奇数索引(虚部) + xk_out_even = xk_even * cos - xk_odd * sin + xk_out_odd = xk_even * sin + xk_odd * cos + + # 重新组合偶数和奇数索引 + xq_out = torch.zeros_like(xq) + xk_out = torch.zeros_like(xk) + xq_out[..., 0::2] = xq_out_even + xq_out[..., 1::2] = xq_out_odd + xk_out[..., 0::2] = xk_out_even + xk_out[..., 1::2] = xk_out_odd + + return xq_out.type_as(xq), xk_out.type_as(xk) + +# repeat_kv 函数用于重复键值对。 +def repeat_kv(x: torch.Tensor, n_rep: int) -> torch.Tensor: + """torch.repeat_interleave(x, dim=2, repeats=n_rep)""" + bs, slen, n_kv_heads, head_dim = x.shape + if n_rep == 1: + return x + return ( + x[:, :, :, None, :] + .expand(bs, slen, n_kv_heads, n_rep, head_dim) + .reshape(bs, slen, n_kv_heads * n_rep, head_dim) + ) + + +class Attention(nn.Module): + def __init__(self, args: LMConfig): 
class Attention(nn.Module):
    """Causal multi-head self-attention with grouped KV heads, real-valued
    rotary position embeddings, and an optional additive knowledge tensor
    (``db_value``) fused into the value stream.
    """

    def __init__(self, args: LMConfig):
        super().__init__()
        # Grouped-query attention: KV heads default to the full head count.
        self.n_kv_heads = args.n_heads if args.n_kv_heads is None else args.n_kv_heads
        assert args.n_heads % self.n_kv_heads == 0
        self.n_local_heads = args.n_heads
        self.n_local_kv_heads = self.n_kv_heads
        self.n_rep = self.n_local_heads // self.n_local_kv_heads  # queries per KV head
        self.head_dim = args.dim // args.n_heads
        # Projections (no bias, following the LLaMA convention used elsewhere in this file).
        self.wq = nn.Linear(args.dim, args.n_heads * self.head_dim, bias=False)
        self.wk = nn.Linear(args.dim, self.n_kv_heads * self.head_dim, bias=False)
        self.wv = nn.Linear(args.dim, self.n_kv_heads * self.head_dim, bias=False)
        self.wo = nn.Linear(args.n_heads * self.head_dim, args.dim, bias=False)
        self.attn_dropout = nn.Dropout(args.dropout)
        self.resid_dropout = nn.Dropout(args.dropout)
        self.dropout = args.dropout
        # Use fused SDPA when available and enabled in the config.
        self.flash = hasattr(torch.nn.functional, 'scaled_dot_product_attention') and args.flash_attn
        # Upper-triangular -inf mask for the slow path (non-persistent buffer).
        causal = torch.triu(
            torch.full((1, 1, args.max_seq_len, args.max_seq_len), float("-inf")),
            diagonal=1,
        )
        self.register_buffer("mask", causal, persistent=False)

    def forward(self,
                x: torch.Tensor,
                pos_cis: torch.Tensor,
                db_value=None):
        batch, seq_len, _ = x.shape
        # Project and split into heads.
        q = self.wq(x).view(batch, seq_len, self.n_local_heads, self.head_dim)
        k = self.wk(x).view(batch, seq_len, self.n_local_kv_heads, self.head_dim)
        v = self.wv(x).view(batch, seq_len, self.n_local_kv_heads, self.head_dim)

        # Rotary position embedding (real-valued variant).
        q, k = apply_rotary_emb_real(q, k, pos_cis)

        # (B, S, H, D) -> (B, H, S, D); expand KV heads to match query heads.
        q = q.transpose(1, 2)
        k = repeat_kv(k, self.n_rep).transpose(1, 2)
        v = repeat_kv(v, self.n_rep).transpose(1, 2)

        # Fuse retrieved knowledge into the values by simple addition.
        if db_value is not None:
            if db_value.ndim == 4:  # assumed [B, N, H, D] -> [B, H, N, D]
                db_value = db_value.transpose(1, 2)
            if db_value.shape[-1] != v.shape[-1]:
                # Cheap dimension adjustment: mean-pool down or repeat up.
                if db_value.shape[-1] > v.shape[-1]:
                    ratio = db_value.shape[-1] // v.shape[-1]
                    db_value = db_value.view(batch, self.n_local_heads, seq_len,
                                             ratio, v.shape[-1]).mean(dim=3)
                else:
                    ratio = v.shape[-1] // db_value.shape[-1]
                    db_value = db_value.unsqueeze(-1).repeat(1, 1, 1, 1, ratio)
                    db_value = db_value.view(batch, self.n_local_heads, seq_len, v.shape[-1])
            v = v + db_value

        if self.flash and seq_len != 1:
            # Fused kernel with built-in causal masking.
            attn_out = F.scaled_dot_product_attention(
                q, k, v,
                attn_mask=None,
                dropout_p=self.dropout if self.training else 0.0,
                is_causal=True,
            )
        else:
            # Explicit masked softmax attention (softmax in fp32 for stability).
            scores = (q @ k.transpose(-2, -1)) / math.sqrt(self.head_dim)
            scores += self.mask[:, :, :seq_len, :seq_len]
            scores = F.softmax(scores.float(), dim=-1).type_as(q)
            scores = self.attn_dropout(scores)
            attn_out = scores @ v

        attn_out = attn_out.transpose(1, 2).reshape(batch, seq_len, -1)
        return self.resid_dropout(self.wo(attn_out))
class CrossAttention(nn.Module):
    """Multi-head cross-attention: queries come from ``x``, keys and values
    from the knowledge tensor ``db``. Head count is fixed at 8.
    """

    def __init__(self, config):
        super().__init__()
        self.config = config
        self.num_heads = 8
        self.head_dim = self.config.dim // self.num_heads
        self.to_q = nn.Linear(self.config.dim, self.config.dim, bias=False)
        self.to_k = nn.Linear(self.config.dim, self.config.dim, bias=False)
        self.to_v = nn.Linear(self.config.dim, self.config.dim, bias=False)
        self.to_out = nn.Linear(self.config.dim, self.config.dim, bias=False)

    def forward(self, x, db, context_mask=None, pos_emb=None):
        bsz = x.size(0)

        def split_heads(t):
            # (B, T, dim) -> (B, heads, T, head_dim)
            return t.view(bsz, -1, self.num_heads, self.head_dim).transpose(1, 2)

        q = split_heads(self.to_q(x))
        k = split_heads(self.to_k(db))
        v = split_heads(self.to_v(db))

        if pos_emb is not None:
            # NOTE(review): the positional term is added to q, k AND v,
            # matching the original implementation.
            pos = pos_emb.view(bsz, -1, self.num_heads, self.head_dim).transpose(1, 2)
            q, k, v = q + pos, k + pos, v + pos

        scores = (q @ k.transpose(-2, -1)) / math.sqrt(self.head_dim)
        if context_mask is not None:
            broadcast_mask = context_mask.unsqueeze(1).expand(-1, self.num_heads, -1, -1)
            scores = scores.masked_fill(broadcast_mask == 0, -1e10)

        weights = F.softmax(scores, dim=-1)
        merged = (weights @ v).transpose(1, 2).contiguous().view(bsz, -1, self.config.dim)
        return self.to_out(merged)


class FeedForward(nn.Module):
    """SwiGLU feed-forward block: ``w2(silu(w1(x)) * w3(x))`` with dropout.

    If ``config.hidden_dim`` is unset, it is derived as 2/3 of 4*dim, rounded
    up to a multiple of ``config.multiple_of`` (LLaMA sizing rule).
    """

    def __init__(self, config: LMConfig):
        super().__init__()
        if config.hidden_dim is None:
            base = int(2 * (4 * config.dim) / 3)
            config.hidden_dim = config.multiple_of * ((base + config.multiple_of - 1) // config.multiple_of)
        self.w1 = nn.Linear(config.dim, config.hidden_dim, bias=False)
        self.w2 = nn.Linear(config.hidden_dim, config.dim, bias=False)
        self.w3 = nn.Linear(config.dim, config.hidden_dim, bias=False)
        self.dropout = nn.Dropout(config.dropout)

    def forward(self, x):
        gated = F.silu(self.w1(x)) * self.w3(x)
        return self.dropout(self.w2(gated))
class MoEGate(nn.Module):
    """Softmax top-k router for the MoE layer.

    Returns per-token expert indices and weights, plus an optional auxiliary
    load-balancing loss (sequence-level or token-level, per ``config.seq_aux``)
    that is only computed during training when ``aux_loss_alpha > 0``.
    """

    def __init__(self, config: LMConfig):
        super().__init__()
        self.config = config
        self.top_k = config.num_experts_per_tok
        self.n_routed_experts = config.n_routed_experts
        self.scoring_func = config.scoring_func
        self.alpha = config.aux_loss_alpha
        self.seq_aux = config.seq_aux
        self.norm_topk_prob = config.norm_topk_prob
        self.gating_dim = config.dim
        self.weight = nn.Parameter(torch.empty((self.n_routed_experts, self.gating_dim)))
        self.reset_parameters()

    def reset_parameters(self) -> None:
        import torch.nn.init as init
        init.kaiming_uniform_(self.weight, a=math.sqrt(5))

    def forward(self, hidden_states):
        bsz, seq_len, dim = hidden_states.shape
        flat = hidden_states.view(-1, dim)
        logits = F.linear(flat, self.weight, None)
        if self.scoring_func != 'softmax':
            raise NotImplementedError(f'insupportable scoring function for MoE gating: {self.scoring_func}')
        scores = logits.softmax(dim=-1)

        topk_weight, topk_idx = torch.topk(scores, k=self.top_k, dim=-1, sorted=False)

        # Optionally renormalize the selected weights so they sum to 1.
        if self.top_k > 1 and self.norm_topk_prob:
            topk_weight = topk_weight / (topk_weight.sum(dim=-1, keepdim=True) + 1e-20)

        if self.training and self.alpha > 0.0:
            flat_choice = topk_idx.view(bsz, -1)
            if self.seq_aux:
                # Sequence-level balance: per-sequence expert usage vs. mean score.
                per_seq_scores = scores.view(bsz, seq_len, -1)
                ce = torch.zeros(bsz, self.n_routed_experts, device=hidden_states.device)
                ce.scatter_add_(
                    1, flat_choice,
                    torch.ones(bsz, seq_len * self.top_k, device=hidden_states.device)
                ).div_(seq_len * self.top_k / self.n_routed_experts)
                aux_loss = (ce * per_seq_scores.mean(dim=1)).sum(dim=1).mean() * self.alpha
            else:
                # Token-level balance (Switch-Transformer style): usage * mean prob.
                one_hot = F.one_hot(flat_choice.view(-1), num_classes=self.n_routed_experts)
                ce = one_hot.float().mean(0)
                Pi = scores.mean(0)
                fi = ce * self.n_routed_experts
                aux_loss = (Pi * fi).sum() * self.alpha
        else:
            aux_loss = 0
        return topk_idx, topk_weight, aux_loss
class MOEFeedForward(nn.Module):
    """Mixture-of-experts feed-forward layer driven by :class:`MoEGate`.

    Training path evaluates every (token, expert) pair densely; inference path
    groups tokens by expert and runs each expert once (:meth:`moe_infer`).
    The gate's auxiliary loss is stashed on ``self.aux_loss`` for the caller.

    FIX: the training scratch buffer was created with a hard-coded
    ``dtype=torch.float16``, silently down-casting every expert output even
    when training in fp32/bf16 (and the later ``.to(y.dtype)`` masked the
    mismatch). It now inherits the input dtype via ``torch.empty_like(x)``.
    """

    def __init__(self, config: LMConfig):
        super().__init__()
        self.config = config
        self.experts = nn.ModuleList([
            FeedForward(config)
            for _ in range(config.n_routed_experts)
        ])
        self.gate = MoEGate(config)
        if config.n_shared_experts is not None:
            # Always-on expert applied to every token in addition to routed ones.
            self.shared_experts = FeedForward(config)

    def forward(self, x):
        identity = x
        orig_shape = x.shape
        bsz, seq_len, _ = x.shape
        # Route each token to its top-k experts.
        topk_idx, topk_weight, aux_loss = self.gate(x)
        x = x.view(-1, x.shape[-1])
        flat_topk_idx = topk_idx.view(-1)
        if self.training:
            # Duplicate each token once per selected expert, then dispatch.
            x = x.repeat_interleave(self.config.num_experts_per_tok, dim=0)
            y = torch.empty_like(x)  # FIX: keep the input dtype (was forced to float16)
            for i, expert in enumerate(self.experts):
                routed = flat_topk_idx == i
                y[routed] = expert(x[routed]).to(y.dtype)
            # Weighted sum over the k expert outputs per token.
            y = (y.view(*topk_weight.shape, -1) * topk_weight.unsqueeze(-1)).sum(dim=1)
            y = y.view(*orig_shape)
        else:
            y = self.moe_infer(x, flat_topk_idx, topk_weight.view(-1, 1)).view(*orig_shape)
        if self.config.n_shared_experts is not None:
            y = y + self.shared_experts(identity)
        self.aux_loss = aux_loss
        return y

    @torch.no_grad()
    def moe_infer(self, x, flat_expert_indices, flat_expert_weights):
        """Inference dispatch: sort token slots by expert and run each expert
        once over its contiguous slice, accumulating weighted outputs."""
        expert_cache = torch.zeros_like(x)
        idxs = flat_expert_indices.argsort()
        # Cumulative slot counts per expert, e.g. [6, 15, 20, 26]:
        # slots idxs[:6] belong to expert 0, idxs[6:15] to expert 1, ...
        tokens_per_expert = flat_expert_indices.bincount().cpu().numpy().cumsum(0)
        # Each token occupies num_experts_per_tok consecutive slots.
        token_idxs = idxs // self.config.num_experts_per_tok
        for i, end_idx in enumerate(tokens_per_expert):
            start_idx = 0 if i == 0 else tokens_per_expert[i - 1]
            if start_idx == end_idx:
                continue  # expert i received no tokens
            expert = self.experts[i]
            exp_token_idx = token_idxs[start_idx:end_idx]
            expert_out = expert(x[exp_token_idx]).to(expert_cache.dtype)
            expert_out.mul_(flat_expert_weights[idxs[start_idx:end_idx]])
            # scatter_add because a token may be processed by several experts.
            expert_cache.scatter_add_(0, exp_token_idx.view(-1, 1).repeat(1, x.shape[-1]), expert_out)

        return expert_cache
class MiniMindBlock(nn.Module):
    """One transformer block.

    Pipeline: RMSNorm -> self-attention (with the retrieved knowledge tensor
    ``db_value`` added into the value stream) -> cross-attention of the
    attention output against ``db_value`` -> residual add -> FFN (dense or MoE
    depending on ``config.use_moe``) with its own pre-norm and residual.

    FIX (cleanup): removed the large blocks of commented-out Product-Key
    retrieval code — that logic now lives in :class:`ExtractDB` and the dead
    copies only obscured the live control flow. No behavioral change.
    """

    def __init__(self, layer_id: int, config: LMConfig):
        super().__init__()
        self.n_heads = config.n_heads
        self.dim = config.dim
        self.head_dim = config.dim // config.n_heads
        self.attention = Attention(config)
        self.cross_att = CrossAttention(config)

        self.layer_id = layer_id
        self.attention_norm = RMSNorm(config.dim, eps=config.norm_eps)
        self.ffn_norm = RMSNorm(config.dim, eps=config.norm_eps)
        self.feed_forward = FeedForward(config) if not config.use_moe else MOEFeedForward(config)

    def forward(self, x, db_value, pos_cis):
        # Self-attention over the normalized input, with knowledge injection.
        h_attn = self.attention(
            self.attention_norm(x),
            pos_cis,
            db_value=db_value
        )

        # Cross-attend the attention output against the retrieved knowledge.
        h_attn = self.cross_att(h_attn, db_value)

        # Residual connection.
        h = x + h_attn

        # Pre-norm feed-forward with residual.
        return h + self.feed_forward(self.ffn_norm(h))
class ExtractDB(nn.Module):
    """Product-key addressable knowledge store.

    ``q_to_k`` maps a batch of hidden states to flat slot indices via a
    two-subspace product-key lookup; ``get_data`` fetches the stored rows;
    ``updata_value`` overwrites rows in place (no gradients — the store is a
    buffer, not a Parameter).

    NOTE(review): attribute/method names ``knowlwdge_num`` and
    ``updata_value`` are (sic) — kept because external callers use them.
    """

    def __init__(self, params):
        super().__init__()
        self.batch_size = None  # remembered by q_to_k, consumed by get_data
        self.dim = params.dim
        self.dim_key = self.dim // 2
        self.knowlwdge_num = params.knowlwdge_num  # slot count; expected to be a perfect square
        self.head_dim = params.dim // params.n_heads
        self.knowledge_length = params.knowlwdge_length * params.dim

        # Buffer rather than nn.Parameter: updated in place, never by autograd.
        self.register_buffer('weight_down_embed',
                             torch.randn(self.knowlwdge_num, self.knowledge_length) * 0.02)

        self.num_keys = int(math.sqrt(self.knowlwdge_num)) if self.knowlwdge_num > 0 else 0
        self.product_key_topk = min(16, self.num_keys)
        # Two sub-key tables of dim_key each; full index = x * num_keys + y.
        self.keys = nn.Parameter(torch.randn(self.num_keys, 2, self.dim_key) * 0.02)
        self.num_experts_per_head_topk = 1
        self.to_queries = nn.Sequential(
            nn.Linear(params.dim, self.dim_key * 2, bias=False),
        )

    def q_to_k(self, x):
        """Return flat slot indices, one group per batch element."""
        self.batch_size, seq_len, dim = x.shape

        # Collapse the sequence dimension by mean pooling.
        pooled = x.mean(dim=1)                                   # [B, dim]
        queries = self.to_queries(pooled)                        # [B, 2*dim_key]
        queries = queries.reshape(self.batch_size, 2, self.dim_key)
        queries = queries.permute(1, 0, 2)                       # [2, B, dim_key]

        # Similarity of each query half against its sub-key table.
        sim = torch.einsum('p b d, k p d -> p b k', queries, self.keys)

        # Top-k independently in each subspace.
        top_x = sim[0].topk(self.product_key_topk, dim=-1)
        top_y = sim[1].topk(self.product_key_topk, dim=-1)

        # Combine subspace scores/indices into the full product space.
        combined_scores = top_x.values.unsqueeze(-1) + top_y.values.unsqueeze(-2)
        combined_scores = combined_scores.view(*combined_scores.shape[:-2], -1)
        combined_idx = (top_x.indices.unsqueeze(-1) * self.num_keys) + top_y.indices.unsqueeze(-2)
        combined_idx = combined_idx.view(*combined_idx.shape[:-2], -1)

        # Final top-k over the combined candidates.
        _, pick = combined_scores.topk(self.num_experts_per_head_topk, dim=-1)
        return combined_idx.gather(-1, pick).view(-1)

    def get_data(self, index):
        """Fetch the stored rows for ``index`` and reshape to [B, -1, dim]."""
        rows = self.weight_down_embed[index]
        return rows.view(self.batch_size, -1, self.dim)

    @torch.no_grad()
    def updata_value(self, k, v):
        """Overwrite slots ``k`` with (flattened, dtype-matched) values ``v``."""
        flat = v.view(v.size(0), -1).to(dtype=self.weight_down_embed.dtype)
        self.weight_down_embed[k] = flat
class MiniMindLM(PreTrainedModel):
    """Causal LM whose blocks consult a product-key knowledge store
    (:class:`ExtractDB`); per-layer hidden states are downsampled and written
    back into the store after each forward pass (outside the autograd graph).

    Fixes vs. the previous revision:
    * ``__init__`` crashed when ``params`` was ``None`` despite advertising a
      default — it read ``params.vocab_size`` instead of the fallback config.
    * ``_stream`` applied repetition penalty by dividing every seen-token
      logit by ``rp``; for negative logits that *raises* repetition
      probability. Now uses the standard CTRL-style rule (divide positive,
      multiply negative).
    * ``_stream``'s continuation step fed only the last token even though the
      KV cache was removed, so continuations ignored all prior context; the
      full sequence is now forwarded every step (slower, but correct without
      a cache).
    """
    config_class = LMConfig

    def __init__(self, params: LMConfig = None):
        self.params = params or LMConfig()
        super().__init__(self.params)
        params = self.params  # FIX: honor the default config when params is None
        self.vocab_size, self.n_layers = params.vocab_size, params.n_layers
        self.tok_embeddings = nn.Embedding(params.vocab_size, params.dim)
        self.dropout = nn.Dropout(params.dropout)
        # Knowledge store (replaces the old per-model weight_down_embed).
        self.extract_db = ExtractDB(self.params)

        self.layers = nn.ModuleList([MiniMindBlock(l, params) for l in range(self.n_layers)])
        self.norm = RMSNorm(params.dim, eps=params.norm_eps)
        self.output = nn.Linear(params.dim, params.vocab_size, bias=False)
        self.tok_embeddings.weight = self.output.weight  # weight tying

        # --- DB-update path: compress per-layer hidden states into new rows ---
        # Channel dimension of the concatenated per-layer states.
        input_dim = (self.params.max_seq_len - 1) * self.params.n_layers
        bottleneck_dim = 256  # small bottleneck keeps the parameter count down

        # Factorized shared downsampling: squeeze to the bottleneck, then expand.
        self.shared_downsample = nn.Sequential(
            nn.Conv1d(input_dim, bottleneck_dim, kernel_size=1, padding='same'),
            nn.ReLU(),  # non-linearity between the two projections
            nn.Conv1d(bottleneck_dim, 128 * 8, kernel_size=1, padding='same')
        )
        # Value-path head.
        self.downsample_v_specific = nn.Sequential(
            nn.Conv1d(128 * 8, 128, kernel_size=1, padding='same'),
            nn.Conv1d(128, 8, kernel_size=1, padding='same')
        )
        # Query-path head.
        self.downsample_q_specific = nn.Sequential(
            nn.Conv1d(128 * 8, 512, kernel_size=1, padding='same')
        )
        # Real-valued rotary table (the complex-tensor variant could segfault).
        self.register_buffer("pos_cis_real",
                             precompute_pos_cis_real(dim=params.dim // params.n_heads,
                                                     theta=params.rope_theta),
                             persistent=False)

    def forward(self,
                input_ids: Optional[torch.Tensor] = None,
                logits_to_keep: Union[int, torch.Tensor] = 0,
                **args):
        start_pos = args.get('start_pos', 0)
        h = self.dropout(self.tok_embeddings(input_ids))
        pos_cis_real = self.pos_cis_real[start_pos:start_pos + input_ids.size(1)]
        h_list = []

        for layer in self.layers:
            if self.params.disable_db:
                # DB disabled: use a tiny constant placeholder instead of a lookup.
                batch_size = h.size(0)
                db_value = torch.full((batch_size, self.n_layers, self.params.dim), 1e-4,
                                      dtype=h.dtype, device=h.device)
            else:
                # Normal mode: retrieve knowledge rows addressed by the current state.
                index = self.extract_db.q_to_k(h)
                db_value = self.extract_db.get_data(index)

            h = layer(h, db_value, pos_cis_real)
            h_list.append(h.unsqueeze(0))

        # [L, B, S, D] -> [B, L, S, D]
        h_tensor = torch.cat(h_list, dim=0).permute(1, 0, 2, 3)

        # DB write-back, detached so it never participates in backprop.
        if not self.params.disable_db:
            h_tensor_detached = h_tensor.detach()
            h_tensor_detached = h_tensor_detached.reshape(h_tensor_detached.shape[0], -1, self.params.dim)
            with torch.no_grad():
                shared_features = self.shared_downsample(h_tensor_detached)
                z_v = self.downsample_v_specific(shared_features)
                z_q = self.downsample_q_specific(shared_features)
                z_k = self.extract_db.q_to_k(z_q)
                self.extract_db.updata_value(z_k, z_v)

        slice_indices = slice(-logits_to_keep, None) if isinstance(logits_to_keep, int) else logits_to_keep
        logits = self.output(self.norm(h)[:, slice_indices, :])
        aux_loss = sum(l.feed_forward.aux_loss for l in self.layers if isinstance(l.feed_forward, MOEFeedForward))

        output = CausalLMOutputWithPast(logits=logits)
        output.hidden_states = h
        output.aux_loss = aux_loss
        return output

    @torch.inference_mode()
    def generate(self, input_ids, eos_token_id=2, max_new_tokens=1024, temperature=0.75, top_p=0.90,
                 stream=False, rp=1., pad_token_id=0, num_return_sequences=1, **args):
        # Streaming: hand back the generator directly.
        if stream:
            return self._stream(input_ids, eos_token_id, max_new_tokens, temperature, top_p, rp, **args)

        # Batch generation: sample each prompt (padding stripped) independently.
        generated = []
        for i in range(input_ids.size(0)):
            non_pad = input_ids[i][input_ids[i] != pad_token_id].unsqueeze(0)
            for _ in range(num_return_sequences):
                out = self._stream(non_pad, eos_token_id, max_new_tokens, temperature, top_p, rp, **args)
                # Each yield appends exactly one token; collect the last column of each.
                tokens_list = [tokens[:, -1:] for tokens in out]
                gen = torch.cat(tokens_list, dim=-1) if tokens_list else non_pad
                full_sequence = torch.cat([non_pad, gen], dim=-1)
                generated.append(full_sequence)

        # Right-pad all sequences to a common length and stack.
        max_length = max(seq.size(1) for seq in generated)
        generated = [
            torch.cat(
                [seq, torch.full((1, max_length - seq.size(1)), pad_token_id, dtype=seq.dtype, device=seq.device)],
                dim=-1)
            for seq in generated
        ]
        output = torch.cat(generated, dim=0)
        return output.view(input_ids.size(0) * num_return_sequences, -1)

    def _stream(self, input_ids, eos_token_id, max_new_tokens, temperature, top_p, rp, **args):
        start = input_ids.shape[1]
        # NOTE(review): this bound limits TOTAL length, not new tokens — kept
        # for backward compatibility with existing callers.
        while input_ids.shape[1] < max_new_tokens - 1:
            # FIX: no KV cache exists, so always forward the full sequence;
            # the old last-token-only continuation ignored all context.
            out = self(input_ids, **args)
            logits = out.logits[:, -1, :]
            # FIX: CTRL-style repetition penalty — divide positive logits,
            # multiply negative ones (plain division rewarded repeats).
            seen = list(set(input_ids[0].tolist()))
            picked = logits[:, seen]
            logits[:, seen] = torch.where(picked > 0, picked / rp, picked * rp)
            logits /= (temperature + 1e-9)
            if top_p is not None and top_p < 1.0:
                # Nucleus sampling: mask everything outside the top-p mass.
                sorted_logits, sorted_indices = torch.sort(logits, descending=True, dim=-1)
                sorted_probs = F.softmax(sorted_logits, dim=-1)
                cumulative_probs = torch.cumsum(sorted_probs, dim=-1)
                sorted_indices_to_remove = cumulative_probs > top_p
                sorted_indices_to_remove[:, 1:] = sorted_indices_to_remove[:, :-1].clone()
                sorted_indices_to_remove[:, 0] = False
                indices_to_remove = sorted_indices_to_remove.scatter(1, sorted_indices, sorted_indices_to_remove)
                logits[indices_to_remove] = -float('Inf')
            input_ids_next = torch.multinomial(F.softmax(logits, dim=-1), num_samples=1)
            input_ids = torch.cat((input_ids, input_ids_next), dim=1)
            yield input_ids[:, start:]
            if input_ids_next.item() == eos_token_id:
                break
def apply_lora(model, rank=16):
    """Attach a LoRA adapter to every square ``nn.Linear`` in ``model``.

    Each patched module gets a ``.lora`` submodule and its ``forward`` is
    wrapped so the adapter output is added to the base output. Adapters are
    zero-initialised (B == 0), so outputs are unchanged until training.
    """
    # BUGFIX: plain nn.Module has no `.device` attribute (only HF models do);
    # derive the device from the model's parameters instead.
    device = next(model.parameters()).device
    for name, module in model.named_modules():
        if isinstance(module, nn.Linear) and module.weight.shape[0] == module.weight.shape[1]:
            lora = LoRA(module.weight.shape[0], module.weight.shape[1], rank=rank).to(device)
            setattr(module, "lora", lora)
            original_forward = module.forward

            # 显式绑定: default arguments freeze this module's forward/adapter,
            # avoiding the late-binding closure pitfall in the loop.
            def forward_with_lora(x, layer1=original_forward, layer2=lora):
                return layer1(x) + layer2(x)

            module.forward = forward_with_lora


def load_lora(model, path):
    """Load adapter weights saved by ``save_lora`` into the attached adapters.

    Keys are namespaced as ``<module_path>.lora.<param>``; each adapter only
    receives its own slice of the checkpoint.
    """
    device = next(model.parameters()).device  # BUGFIX: nn.Module has no `.device`
    state_dict = torch.load(path, map_location=device)
    for name, module in model.named_modules():
        if hasattr(module, 'lora'):
            lora_state = {k.replace(f'{name}.lora.', ''): v for k, v in state_dict.items() if f'{name}.lora.' in k}
            module.lora.load_state_dict(lora_state)


def save_lora(model, path):
    """Collect all adapter weights (keyed by module path) into one checkpoint file."""
    state_dict = {}
    for name, module in model.named_modules():
        if hasattr(module, 'lora'):
            lora_state = {f'{name}.lora.{k}': v for k, v in module.lora.state_dict().items()}
            state_dict.update(lora_state)
    torch.save(state_dict, path)
+markdown-it-py==3.0.0 +MarkupSafe==3.0.2 +marshmallow==3.22.0 +matplotlib==3.10.0 +mdurl==0.1.2 +modelscope==1.25.0 +mpmath==1.3.0 +msgpack==1.1.0 +multidict==6.4.3 +multiprocess==0.70.16 +narwhals==1.35.0 +networkx==3.4.2 +ngrok==1.4.0 +ninja==1.11.1.4 +nltk==3.8 +numpy==1.26.4 +openai==1.59.6 +packaging==23.2 +pandas==1.5.3 +peft==0.7.1 +pillow==10.4.0 +platformdirs==4.3.7 +propcache==0.3.1 +protobuf==4.25.6 +psutil==5.9.8 +py-cpuinfo==9.0.0 +pyarrow==19.0.1 +pydantic==2.8.2 +pydantic_core==2.20.1 +pydeck==0.9.1 +Pygments==2.19.1 +pyparsing==3.2.3 +python-dateutil==2.9.0.post0 +pytz==2025.2 +PyYAML==6.0.2 +referencing==0.36.2 +regex==2024.11.6 +requests==2.32.3 +rich==13.7.1 +rpds-py==0.24.0 +safetensors==0.5.3 +scikit-learn==1.5.1 +scipy==1.15.2 +sentence-transformers==2.3.1 +sentencepiece==0.2.0 +sentry-sdk==2.26.1 +setproctitle==1.3.5 +simhash==2.1.2 +six==1.17.0 +smmap==5.0.2 +sniffio==1.3.1 +streamlit==1.30.0 +sympy==1.13.3 +tenacity==8.5.0 +threadpoolctl==3.6.0 +tiktoken==0.5.1 +tokenizers==0.21.1 +toml==0.10.2 +tornado==6.4.2 +tqdm==4.67.1 +transformers==4.48.0 +triton==3.3.0 +trl==0.13.0 +typing_extensions==4.13.2 +tzlocal==5.3.1 +ujson==5.1.0 +urllib3==2.4.0 +validators==0.34.0 +wandb==0.18.3 +watchdog==6.0.0 +Werkzeug==3.1.3 +xxhash==3.5.0 +yarl==1.20.0 +zipp==3.21.0 diff --git a/run_file/DynamicKV-LLM_Mini_Minimind.sh b/run_file/DynamicKV-LLM_Mini_Minimind.sh new file mode 100644 index 0000000..483c194 --- /dev/null +++ b/run_file/DynamicKV-LLM_Mini_Minimind.sh @@ -0,0 +1,47 @@ +#!/bin/bash + +# 激活conda环境 +# source $(conda info --base)/etc/profile.d/conda.sh +# conda activate ycz_accelerate + +# 设置环境变量以帮助调试 +export NCCL_DEBUG=INFO +export PYTHONFAULTHANDLER=1 + +# 方法1: 使用预先配置的accelerate配置文件 +# accelerate launch --config_file accelerate_config.yaml train_pretrain_accelerate.py \ +# --epochs 3 \ +# --batch_size 24 \ +# --learning_rate 2e-4 \ +# --dtype bfloat16 \ +# --accumulation_steps 32 \ +# --grad_clip 1.0 \ +# --log_interval 100 \ +# 
--save_interval 10000 \ +# --dim 1024 \ +# --n_layers 32 \ +# --max_seq_len 1024 \ +# --use_flash_attn \ +# --profile \ +# --profile_interval 10 + +# 方法2: 使用命令行参数直接配置accelerate +CUDA_VISIBLE_DEVICES=0 accelerate launch \ + --num_processes=1 \ + --mixed_precision=bf16 \ + --main_process_port=29500 \ + train_pretrain_accelerate.py \ + --epochs 3 \ + --batch_size 24 \ + --learning_rate 2e-4 \ + --dtype bfloat16 \ + --accumulation_steps 32 \ + --grad_clip 1.0 \ + --log_interval 100 \ + --save_interval 10000 \ + --dim 512 \ + --n_layers 12 \ + --max_seq_len 512 \ + --use_flash_attn \ + --profile \ + --profile_interval 10 diff --git a/run_file/DynamicKV-LLM_Small_Minimind.sh b/run_file/DynamicKV-LLM_Small_Minimind.sh new file mode 100644 index 0000000..55edd0f --- /dev/null +++ b/run_file/DynamicKV-LLM_Small_Minimind.sh @@ -0,0 +1,48 @@ +#!/bin/bash + +# 激活conda环境 +source $(conda info --base)/etc/profile.d/conda.sh +conda activate ycz_accelerate + +# 设置环境变量以帮助调试 +export NCCL_DEBUG=INFO +export PYTHONFAULTHANDLER=1 + +# 方法1: 使用预先配置的accelerate配置文件 +# accelerate launch --config_file accelerate_config.yaml train_pretrain_accelerate.py \ +# --epochs 3 \ +# --batch_size 24 \ +# --learning_rate 2e-4 \ +# --dtype bfloat16 \ +# --accumulation_steps 32 \ +# --grad_clip 1.0 \ +# --log_interval 100 \ +# --save_interval 10000 \ +# --dim 1024 \ +# --n_layers 32 \ +# --max_seq_len 1024 \ +# --use_flash_attn \ +# --profile \ +# --profile_interval 10 + +# 方法2: 使用命令行参数直接配置accelerate +CUDA_VISIBLE_DEVICES=0,1,2,3 accelerate launch \ + --multi_gpu \ + --num_processes=4 \ + --mixed_precision=bf16 \ + --main_process_port=29500 \ + train_pretrain_accelerate.py \ + --epochs 3 \ + --batch_size 24 \ + --learning_rate 2e-4 \ + --dtype bfloat16 \ + --accumulation_steps 32 \ + --grad_clip 1.0 \ + --log_interval 100 \ + --save_interval 10000 \ + --dim 1024 \ + --n_layers 32 \ + --max_seq_len 1024 \ + --use_flash_attn \ + --profile \ + --profile_interval 10 diff --git a/scripts/chat_openai_api.py 
b/scripts/chat_openai_api.py new file mode 100644 index 0000000..2f2bc53 --- /dev/null +++ b/scripts/chat_openai_api.py @@ -0,0 +1,30 @@ +from openai import OpenAI + +client = OpenAI( + api_key="none", + base_url="http://localhost:8998/v1" +) +stream = True +conversation_history_origin = [] +conversation_history = conversation_history_origin.copy() +history_messages_num = 2 # 设置为偶数(Q+A),为0则每次不携带历史对话进行独立QA +while True: + query = input('[Q]: ') + conversation_history.append({"role": "user", "content": query}) + response = client.chat.completions.create( + model="minimind", + messages=conversation_history[-history_messages_num:], + stream=stream + ) + if not stream: + assistant_res = response.choices[0].message.content + print('[A]: ', assistant_res) + else: + print('[A]: ', end='') + assistant_res = '' + for chunk in response: + print(chunk.choices[0].delta.content or "", end="") + assistant_res += chunk.choices[0].delta.content or "" + + conversation_history.append({"role": "assistant", "content": assistant_res}) + print('\n\n') diff --git a/scripts/convert_model.py b/scripts/convert_model.py new file mode 100644 index 0000000..9c2209f --- /dev/null +++ b/scripts/convert_model.py @@ -0,0 +1,62 @@ +import torch +import warnings +import sys +import os + +__package__ = "scripts" +sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))) +from transformers import AutoTokenizer, AutoModelForCausalLM +from model.LMConfig import LMConfig +from model.model import MiniMindLM + +warnings.filterwarnings('ignore', category=UserWarning) + + +def convert_torch2transformers(torch_path, transformers_path): + def export_tokenizer(transformers_path): + tokenizer = AutoTokenizer.from_pretrained('../model/minimind_tokenizer') + tokenizer.save_pretrained(transformers_path) + + LMConfig.register_for_auto_class() + MiniMindLM.register_for_auto_class("AutoModelForCausalLM") + lm_model = MiniMindLM(lm_config) + device = torch.device('cuda' if torch.cuda.is_available() 
def convert_transformers2torch(transformers_path, torch_path):
    """Load a Transformers-format MiniMind checkpoint and re-save its weights
    as a raw PyTorch state dict at ``torch_path``."""
    hf_model = AutoModelForCausalLM.from_pretrained(transformers_path, trust_remote_code=True)
    torch.save(hf_model.state_dict(), torch_path)
    print(f"模型已保存为 PyTorch 格式: {torch_path}")
def generate_stream_response(messages, temperature, top_p, max_tokens):
    """Yield OpenAI-style SSE chunks for a streamed chat completion.

    Uses the module-level ``model``/``tokenizer``/``device`` globals; any
    exception is reported as a final SSE error event instead of propagating.
    """
    try:
        # Render the chat template, keeping only the last `max_tokens`
        # characters of the prompt (NOTE(review): character-level, not
        # token-level, truncation — preserved from the original).
        prompt_text = tokenizer.apply_chat_template(
            messages, tokenize=False, add_generation_prompt=True
        )[-max_tokens:]
        prompt_ids = tokenizer(prompt_text).data['input_ids']
        prompt_tensor = torch.tensor(prompt_ids, dtype=torch.long, device=device)[None, ...]
        with torch.no_grad():
            token_stream = model.generate(
                prompt_tensor,
                eos_token_id=tokenizer.eos_token_id,
                max_new_tokens=max_tokens,
                temperature=temperature,
                top_p=top_p,
                stream=True,
                rp=1.,
                pad_token_id=tokenizer.pad_token_id
            )
        emitted = 0
        for step in token_stream:
            decoded = tokenizer.decode(step[0].tolist(), skip_special_tokens=True)
            # Skip empty decodes and trailing partial-UTF8 glyphs.
            if not decoded or decoded[-1] == '�':
                continue
            delta = decoded[emitted:]
            emitted = len(decoded)
            payload = {
                'id': f'chatcmpl-{int(time.time())}',
                'object': 'chat.completion.chunk',
                'created': int(time.time()),
                'model': 'minimind',
                'choices': [{'index': 0, 'delta': {'content': delta}, 'finish_reason': None}]
            }
            yield f"data: {json.dumps(payload)}\n\n"

    except Exception as e:
        yield f"data: {json.dumps({'error': str(e)})}\n\n"
for MiniMind") + parser.add_argument('--out_dir', default='out', type=str) + parser.add_argument('--lora_name', default='None', type=str) + parser.add_argument('--dim', default=512, type=int) + parser.add_argument('--n_layers', default=8, type=int) + parser.add_argument('--max_seq_len', default=8192, type=int) + parser.add_argument('--use_moe', default=False, type=bool) + parser.add_argument('--load', default=0, type=int, help="0: 从原生torch权重,1: 利用transformers加载") + parser.add_argument('--model_mode', default=1, type=int, help="0: 预训练模型,1: SFT-Chat模型,2: RLHF-Chat模型,3: Reason模型") + + device = 'cuda' if torch.cuda.is_available() else 'cpu' + model, tokenizer = init_model(parser.parse_args()) + + uvicorn.run(app, host="0.0.0.0", port=8998) diff --git a/scripts/train_tokenizer.py b/scripts/train_tokenizer.py new file mode 100644 index 0000000..868099a --- /dev/null +++ b/scripts/train_tokenizer.py @@ -0,0 +1,152 @@ +import random +from tqdm import tqdm +from transformers import AutoTokenizer +import json +from datasets import load_dataset +from tokenizers import ( + decoders, + models, + normalizers, + pre_tokenizers, + processors, + trainers, + Tokenizer, +) +import os + +random.seed(42) + + +def train_tokenizer(): + # 读取JSONL文件并提取文本数据 + def read_texts_from_jsonl(file_path): + with open(file_path, 'r', encoding='utf-8') as f: + for line in f: + data = json.loads(line) + yield data['text'] + + data_path = '../dataset/pretrain_hq.jsonl' + + # 初始化tokenizer + tokenizer = Tokenizer(models.BPE()) + tokenizer.pre_tokenizer = pre_tokenizers.ByteLevel(add_prefix_space=False) + + # 定义特殊token + special_tokens = ["", "", ""] + + # 设置训练器并添加特殊token + trainer = trainers.BpeTrainer( + vocab_size=6400, + special_tokens=special_tokens, # 确保这三个token被包含 + show_progress=True, + initial_alphabet=pre_tokenizers.ByteLevel.alphabet() + ) + + # 读取文本数据 + texts = read_texts_from_jsonl(data_path) + + # 训练tokenizer + tokenizer.train_from_iterator(texts, trainer=trainer) + + # 设置解码器 + 
tokenizer.decoder = decoders.ByteLevel() + + # 检查特殊token的索引 + assert tokenizer.token_to_id("") == 0 + assert tokenizer.token_to_id("") == 1 + assert tokenizer.token_to_id("") == 2 + + # 保存tokenizer + tokenizer_dir = "../model/minimind_tokenizer" + os.makedirs(tokenizer_dir, exist_ok=True) + tokenizer.save(os.path.join(tokenizer_dir, "tokenizer.json")) + tokenizer.model.save("../model/minimind_tokenizer") + + # 手动创建配置文件 + config = { + "add_bos_token": False, + "add_eos_token": False, + "add_prefix_space": False, + "added_tokens_decoder": { + "0": { + "content": "", + "lstrip": False, + "normalized": False, + "rstrip": False, + "single_word": False, + "special": True + }, + "1": { + "content": "", + "lstrip": False, + "normalized": False, + "rstrip": False, + "single_word": False, + "special": True + }, + "2": { + "content": "", + "lstrip": False, + "normalized": False, + "rstrip": False, + "single_word": False, + "special": True + } + }, + "additional_special_tokens": [], + "bos_token": "", + "clean_up_tokenization_spaces": False, + "eos_token": "", + "legacy": True, + "model_max_length": 32768, + "pad_token": "", + "sp_model_kwargs": {}, + "spaces_between_special_tokens": False, + "tokenizer_class": "PreTrainedTokenizerFast", + "unk_token": "", + "chat_template": "{% if messages[0]['role'] == 'system' %}{% set system_message = messages[0]['content'] %}{{ 'system\\n' + system_message + '\\n' }}{% else %}{{ 'system\\n你是 MiniMind,是一个有用的人工智能助手。\\n' }}{% endif %}{% for message in messages %}{% set content = message['content'] %}{% if message['role'] == 'user' %}{{ 'user\\n' + content + '\\nassistant\\n' }}{% elif message['role'] == 'assistant' %}{{ content + '' + '\\n' }}{% endif %}{% endfor %}" + } + + # 保存配置文件 + with open(os.path.join(tokenizer_dir, "tokenizer_config.json"), "w", encoding="utf-8") as config_file: + json.dump(config, config_file, ensure_ascii=False, indent=4) + + print("Tokenizer training completed and saved.") + + +def eval_tokenizer(): + from 
def process_assistant_content(content):
    """Wrap an R1 model's <think>…</think> reasoning segment in collapsible HTML.

    NOTE(review): the tag literals and the details/summary markup were stripped
    out of this copy of the file (leaving `'' in content` tautologies); they are
    restored below as the MiniMind-R1 '<think>'/'</think>' markers — confirm
    against the model's actual output format.
    """
    # Non-R1 checkpoints emit no reasoning tags; pass content through untouched.
    if 'R1' not in MODEL_PATHS[selected_model][1]:
        return content

    # Complete reasoning block: collapse it into an expandable box.
    if '<think>' in content and '</think>' in content:
        content = re.sub(
            r'(<think>)(.*?)(</think>)',
            r'<details style="font-style: italic; background: rgba(222, 222, 222, 0.5); padding: 10px; border-radius: 10px;"><summary style="font-weight:bold;">推理内容(展开)</summary>\2</details>',
            content,
            flags=re.DOTALL)

    # Opened but unfinished reasoning: show a live "thinking…" box.
    if '<think>' in content and '</think>' not in content:
        content = re.sub(
            r'<think>(.*?)$',
            r'<details open style="font-style: italic; background: rgba(222, 222, 222, 0.5); padding: 10px; border-radius: 10px;"><summary style="font-weight:bold;">推理中...</summary>\1</details>',
            content,
            flags=re.DOTALL)

    # Closing tag only (opening tag trimmed upstream): still collapse the prefix.
    if '<think>' not in content and '</think>' in content:
        content = re.sub(
            r'(.*?)</think>',
            r'<details style="font-style: italic; background: rgba(222, 222, 222, 0.5); padding: 10px; border-radius: 10px;"><summary style="font-weight:bold;">推理内容(展开)</summary>\1</details>',
            content,
            flags=re.DOTALL)

    return content
{message["content"]}
', + unsafe_allow_html=True) + + else: + st.session_state.messages = [] + st.session_state.chat_messages = [] + + return st.session_state.messages + + +# 添加这两个辅助函数 +def regenerate_answer(index): + st.session_state.messages.pop() + st.session_state.chat_messages.pop() + st.rerun() + + +def delete_conversation(index): + st.session_state.messages.pop(index) + st.session_state.messages.pop(index - 1) + st.session_state.chat_messages.pop(index) + st.session_state.chat_messages.pop(index - 1) + st.rerun() + + +# 侧边栏模型选择 +st.sidebar.title("模型设定调整") + +st.sidebar.text("【注】训练数据偏差,增加上下文记忆时\n多轮对话(较单轮)容易出现能力衰减") +st.session_state.history_chat_num = st.sidebar.slider("Number of Historical Dialogues", 0, 6, 0, step=2) +# st.session_state.history_chat_num = 0 +st.session_state.max_new_tokens = st.sidebar.slider("Max Sequence Length", 256, 8192, 8192, step=1) +st.session_state.top_p = st.sidebar.slider("Top-P", 0.8, 0.99, 0.85, step=0.01) +st.session_state.temperature = st.sidebar.slider("Temperature", 0.6, 1.2, 0.85, step=0.01) + +# 模型路径映射 +MODEL_PATHS = { + "MiniMind2-R1 (0.1B)": ["../MiniMind2-R1", "MiniMind2-R1"], + "MiniMind2-Small-R1 (0.02B)": ["../MiniMind2-Small-R1", "MiniMind2-Small-R1"], + "MiniMind2 (0.1B)": ["../MiniMind2", "MiniMind2"], + "MiniMind2-MoE (0.15B)": ["../MiniMind2-MoE", "MiniMind2-MoE"], + "MiniMind2-Small (0.02B)": ["../MiniMind2-Small", "MiniMind2-Small"], + "MiniMind-V1 (0.1B)": ["../minimind-v1", "MiniMind-V1"], + "MiniMind-V1-MoE (0.1B)": ["../minimind-v1-moe", "MiniMind-V1-MoE"], + "MiniMind-V1-Small (0.02B)": ["../minimind-v1-small", "MiniMind-V1-Small"], +} + +selected_model = st.sidebar.selectbox('Models', list(MODEL_PATHS.keys()), index=2) # 默认选择 MiniMind2 +model_path = MODEL_PATHS[selected_model][0] + +slogan = f"Hi, I'm {MODEL_PATHS[selected_model][1]}" + +image_url = "https://www.modelscope.cn/api/v1/studio/gongjy/MiniMind/repo?Revision=master&FilePath=images%2Flogo2.png&View=true" + +st.markdown( + f'
' + '
' + f' ' + f'{slogan}' + '
' + '内容完全由AI生成,请务必仔细甄别
Content AI-generated, please discern with care
' + '
', + unsafe_allow_html=True +) + + +def setup_seed(seed): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + torch.cuda.manual_seed(seed) + torch.cuda.manual_seed_all(seed) + torch.backends.cudnn.deterministic = True + torch.backends.cudnn.benchmark = False + + +def main(): + model, tokenizer = load_model_tokenizer(model_path) + + # 初始化消息列表 + if "messages" not in st.session_state: + st.session_state.messages = [] + st.session_state.chat_messages = [] + + # Use session state messages + messages = st.session_state.messages + + # 在显示历史消息的循环中 + for i, message in enumerate(messages): + if message["role"] == "assistant": + with st.chat_message("assistant", avatar=image_url): + st.markdown(process_assistant_content(message["content"]), unsafe_allow_html=True) + if st.button("×", key=f"delete_{i}"): + # 删除当前消息及其之后的所有消息 + st.session_state.messages = st.session_state.messages[:i - 1] + st.session_state.chat_messages = st.session_state.chat_messages[:i - 1] + st.rerun() + else: + st.markdown( + f'
{message["content"]}
', + unsafe_allow_html=True) + + # 处理新的输入或重新生成 + prompt = st.chat_input(key="input", placeholder="给 MiniMind 发送消息") + + # 检查是否需要重新生成 + if hasattr(st.session_state, 'regenerate') and st.session_state.regenerate: + prompt = st.session_state.last_user_message + regenerate_index = st.session_state.regenerate_index # 获取重新生成的位置 + # 清除所有重新生成相关的状态 + delattr(st.session_state, 'regenerate') + delattr(st.session_state, 'last_user_message') + delattr(st.session_state, 'regenerate_index') + + if prompt: + st.markdown( + f'
{prompt}
', + unsafe_allow_html=True) + messages.append({"role": "user", "content": prompt}) + st.session_state.chat_messages.append({"role": "user", "content": prompt}) + + with st.chat_message("assistant", avatar=image_url): + placeholder = st.empty() + random_seed = random.randint(0, 2 ** 32 - 1) + setup_seed(random_seed) + + st.session_state.chat_messages = system_prompt + st.session_state.chat_messages[ + -(st.session_state.history_chat_num + 1):] + new_prompt = tokenizer.apply_chat_template( + st.session_state.chat_messages, + tokenize=False, + add_generation_prompt=True + )[-(st.session_state.max_new_tokens - 1):] + + x = torch.tensor(tokenizer(new_prompt)['input_ids'], device=device).unsqueeze(0) + with torch.no_grad(): + res_y = model.generate(x, tokenizer.eos_token_id, max_new_tokens=st.session_state.max_new_tokens, + temperature=st.session_state.temperature, + top_p=st.session_state.top_p, stream=True) + try: + for y in res_y: + answer = tokenizer.decode(y[0].tolist(), skip_special_tokens=True) + if (answer and answer[-1] == '�') or not answer: + continue + placeholder.markdown(process_assistant_content(answer), unsafe_allow_html=True) + except StopIteration: + print("No answer") + + assistant_answer = answer.replace(new_prompt, "") + messages.append({"role": "assistant", "content": assistant_answer}) + st.session_state.chat_messages.append({"role": "assistant", "content": assistant_answer}) + + with st.empty(): + if st.button("×", key=f"delete_{len(messages) - 1}"): + st.session_state.messages = st.session_state.messages[:-2] + st.session_state.chat_messages = st.session_state.chat_messages[:-2] + st.rerun() + + +if __name__ == "__main__": + from transformers import AutoModelForCausalLM, AutoTokenizer + + main() diff --git a/test_real_rope.py b/test_real_rope.py new file mode 100644 index 0000000..fe65292 --- /dev/null +++ b/test_real_rope.py @@ -0,0 +1,97 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +""" +测试实数版本的位置编码 +""" + +import torch +from 
def test_pos_encoding_equivalence():
    """测试复数版本和实数版本的位置编码是否等价"""
    print("测试位置编码等价性...")

    # Build both encodings for the same geometry.
    dim, seq_len = 64, 10
    pos_cis = precompute_pos_cis(dim=dim, end=seq_len)
    pos_cis_real = precompute_pos_cis_real(dim=dim, end=seq_len)

    # Random queries/keys of shape (batch, seq, heads, head_dim).
    batch_size, n_heads, head_dim = 2, 4, dim
    xq = torch.randn(batch_size, seq_len, n_heads, head_dim)
    xk = torch.randn(batch_size, seq_len, n_heads, head_dim)

    # Apply both rotary implementations to identical inputs.
    xq_complex, xk_complex = apply_rotary_emb(xq, xk, pos_cis)
    xq_real, xk_real = apply_rotary_emb_real(xq, xk, pos_cis_real)

    # Mean absolute deviation between the two implementations.
    q_diff = (xq_complex - xq_real).abs().mean().item()
    k_diff = (xk_complex - xk_real).abs().mean().item()

    print(f"查询差异: {q_diff:.6f}")
    print(f"键差异: {k_diff:.6f}")

    # 检查差异是否在可接受范围内
    tolerance = 1e-5
    if q_diff < tolerance and k_diff < tolerance:
        print("✅ 测试通过: 复数版本和实数版本的位置编码在数值上等价")
    else:
        print("❌ 测试失败: 复数版本和实数版本的位置编码存在显著差异")
def train_epoch(epoch, wandb):
    """One epoch of reasoning distillation.

    Token-level cross-entropy is weighted by ``loss_mask``; positions whose
    target is one of the reasoning tags get a 10x weight so the model learns
    to emit them reliably.

    NOTE(review): the tag strings were blanked to '' in this copy of the
    file; restored as '<think>'/'</think>'/'<answer>'/'</answer>' — confirm
    against the tokenizer's vocabulary.
    """
    # 思考标签占位符:这些目标位置的loss权重会被放大
    start_of_think_ids = tokenizer('<think>').input_ids
    end_of_think_ids = tokenizer('</think>').input_ids
    start_of_answer_ids = tokenizer('<answer>').input_ids
    end_of_answer_ids = tokenizer('</answer>').input_ids
    loss_fct = nn.CrossEntropyLoss(reduction='none')
    start_time = time.time()
    for step, (X, Y, loss_mask) in enumerate(train_loader):
        X = X.to(args.device)
        Y = Y.to(args.device)
        loss_mask = loss_mask.to(args.device)
        # Cosine schedule with a lr/10 floor (see get_lr).
        lr = get_lr(epoch * iter_per_epoch + step, args.epochs * iter_per_epoch, args.learning_rate)
        for param_group in optimizer.param_groups:
            param_group['lr'] = lr

        with ctx:
            res = model(X)
            loss = loss_fct(
                res.logits.view(-1, res.logits.size(-1)),
                Y.view(-1)
            ).view(Y.size())
            # Positions whose target token belongs to any of the special tags.
            sp_ids = torch.isin(Y.view(-1),
                                torch.tensor(start_of_think_ids + end_of_think_ids
                                             + start_of_answer_ids + end_of_answer_ids
                                             ).to(args.device))
            # 在 sp_ids 对应的位置增加额外的惩罚
            loss_mask = loss_mask.view(-1)
            # Normalizer uses the *unboosted* mask sum, so the 10x tag weight
            # amplifies those terms without shrinking everything else.
            loss_mask_sum = loss_mask.sum()
            loss_mask[sp_ids] = 10
            loss_mask = loss_mask.view(Y.size())
            loss = (loss * loss_mask).sum() / loss_mask_sum
            loss += res.aux_loss
            loss = loss / args.accumulation_steps

        scaler.scale(loss).backward()

        # Optimizer step only on accumulation boundaries.
        if (step + 1) % args.accumulation_steps == 0:
            scaler.unscale_(optimizer)
            torch.nn.utils.clip_grad_norm_(model.parameters(), args.grad_clip)

            scaler.step(optimizer)
            scaler.update()

            optimizer.zero_grad(set_to_none=True)

        if step % args.log_interval == 0:
            spend_time = time.time() - start_time
            Logger(
                'Epoch:[{}/{}]({}/{}) loss:{:.3f} lr:{:.12f} epoch_Time:{}min:'.format(
                    epoch + 1,
                    args.epochs,
                    step,
                    iter_per_epoch,
                    loss.item(),
                    optimizer.param_groups[-1]['lr'],
                    spend_time / (step + 1) * iter_per_epoch // 60 - spend_time // 60))

            if (wandb is not None) and (not ddp or dist.get_rank() == 0):
                wandb.log({"loss": loss,
                           "lr": optimizer.param_groups[-1]['lr'],
                           "epoch_Time": spend_time / (step + 1) * iter_per_epoch // 60 - spend_time // 60})

        # Periodic checkpoint (rank 0 only under DDP).
        if (step + 1) % args.save_interval == 0 and (not ddp or dist.get_rank() == 0):
            model.eval()
            moe_path = '_moe' if lm_config.use_moe else ''
            ckp = f'{args.save_dir}/reason_{lm_config.dim}{moe_path}.pth'

            if isinstance(model, torch.nn.parallel.DistributedDataParallel):
                state_dict = model.module.state_dict()
            else:
                state_dict = model.state_dict()

            torch.save(state_dict, ckp)
            model.train()
if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="MiniMind Distill Reasoning")
    parser.add_argument("--out_dir", type=str, default="out")
    parser.add_argument("--epochs", type=int, default=1)
    parser.add_argument("--batch_size", type=int, default=8)
    parser.add_argument("--learning_rate", type=float, default=1e-6)
    parser.add_argument("--device", type=str, default="cuda:0" if torch.cuda.is_available() else "cpu")
    parser.add_argument("--dtype", type=str, default="bfloat16")
    parser.add_argument("--use_wandb", action="store_true")
    parser.add_argument("--wandb_project", type=str, default="MiniMind-Full-SFT")
    parser.add_argument("--num_workers", type=int, default=1)
    parser.add_argument("--ddp", action="store_true")
    parser.add_argument("--accumulation_steps", type=int, default=1)
    parser.add_argument("--grad_clip", type=float, default=1.0)
    parser.add_argument("--warmup_iters", type=int, default=0)
    parser.add_argument("--log_interval", type=int, default=1)
    parser.add_argument("--save_interval", type=int, default=50)
    parser.add_argument('--local_rank', type=int, default=-1)
    parser.add_argument('--dim', default=512, type=int)
    parser.add_argument('--n_layers', default=8, type=int)
    parser.add_argument('--max_seq_len', default=1024, type=int)
    # NOTE(review): argparse `type=bool` treats any non-empty string as True,
    # so `--use_moe False` still enables MoE — confirm intended usage.
    parser.add_argument('--use_moe', default=False, type=bool)
    parser.add_argument("--data_path", type=str, default="./dataset/r1_mix_1024.jsonl")

    args = parser.parse_args()

    lm_config = LMConfig(dim=args.dim, n_layers=args.n_layers, max_seq_len=args.max_seq_len, use_moe=args.use_moe)
    args.save_dir = os.path.join(args.out_dir)
    os.makedirs(args.save_dir, exist_ok=True)
    os.makedirs(args.out_dir, exist_ok=True)

    tokens_per_iter = args.batch_size * lm_config.max_seq_len
    device_type = "cuda" if "cuda" in args.device else "cpu"

    args.wandb_run_name = f"MiniMind-Distill-Reasoning-Epoch-{args.epochs}-BatchSize-{args.batch_size}-LearningRate-{args.learning_rate}"

    # Mixed-precision autocast only makes sense on CUDA; CPU runs in full precision.
    ctx = nullcontext() if device_type == "cpu" else torch.cuda.amp.autocast()
    ddp = int(os.environ.get("RANK", -1)) != -1  # is this a ddp run?
    ddp_local_rank, DEVICE = 0, "cuda:0"
    base_seed = 1337
    torch.manual_seed(base_seed)
    torch.cuda.manual_seed(base_seed)

    if ddp:
        # init_distributed_mode() rebinds the globals ddp_local_rank and DEVICE.
        init_distributed_mode()
        args.device = torch.device(DEVICE)
        rank = dist.get_rank()
        torch.manual_seed(base_seed + rank)
        # Also seed the CUDA RNG; per-rank offset decorrelates the streams.
        torch.cuda.manual_seed(base_seed + rank)

    # wandb only on the rank-0 process so runs are not logged N times.
    if args.use_wandb and (not ddp or ddp_local_rank == 0):
        import wandb

        wandb.init(project=args.wandb_project, name=args.wandb_run_name)
    else:
        wandb = None

    model, tokenizer = init_model(lm_config)

    train_ds = SFTDataset(args.data_path, tokenizer, max_length=lm_config.max_seq_len)
    # Under DDP the sampler shards the dataset; shuffle stays False because the
    # sampler (or sequential order) controls ordering.
    train_sampler = DistributedSampler(train_ds) if ddp else None
    train_loader = DataLoader(
        train_ds,
        batch_size=args.batch_size,
        pin_memory=True,
        drop_last=False,
        shuffle=False,
        num_workers=args.num_workers,
        sampler=train_sampler
    )

    scaler = torch.cuda.amp.GradScaler(enabled=(args.dtype in ['float16', 'bfloat16']))
    optimizer = optim.AdamW(model.parameters(), lr=args.learning_rate)

    if ddp:
        # pos_cis are rotary-embedding buffers that must not be synced by DDP.
        model._ddp_params_and_buffers_to_ignore = {"pos_cis"}
        model = DistributedDataParallel(model, device_ids=[ddp_local_rank])

    iter_per_epoch = len(train_loader)
    for epoch in range(args.epochs):
        train_epoch(epoch, wandb)
torch.distributed as dist +from contextlib import nullcontext + +from torch import optim, nn +from torch.nn.parallel import DistributedDataParallel +from torch.utils.data import DataLoader, DistributedSampler +from transformers import AutoTokenizer, AutoModelForCausalLM +from model.model import MiniMindLM +from model.LMConfig import LMConfig +from model.dataset import SFTDataset + +warnings.filterwarnings('ignore') + + +def Logger(content): + if not ddp or dist.get_rank() == 0: + print(content) + + +def get_lr(current_step, total_steps, lr): + return lr / 10 + 0.5 * lr * (1 + math.cos(math.pi * current_step / total_steps)) + + +def distillation_loss_fn(student_logits, teacher_logits, temperature=1.0, reduction='batchmean'): + with torch.no_grad(): + teacher_probs = F.softmax(teacher_logits / temperature, dim=-1).detach() + + student_log_probs = F.log_softmax(student_logits / temperature, dim=-1) + + kl = F.kl_div( + student_log_probs, + teacher_probs, + reduction=reduction + ) + return (temperature ** 2) * kl + + +def train_epoch(epoch, wandb, alpha=0.0, temperature=1.0): + start_time = time.time() + + if teacher_model is not None: + teacher_model.eval() + teacher_model.requires_grad_(False) + + for step, (X, Y, loss_mask) in enumerate(train_loader): + X = X.to(args.device) + Y = Y.to(args.device) + loss_mask = loss_mask.to(args.device) + lr = get_lr(epoch * iter_per_epoch + step, + args.epochs * iter_per_epoch, + args.learning_rate) + for param_group in optimizer.param_groups: + param_group['lr'] = lr + + # 前向传播(学生模型) + with ctx: + res = model(X) + student_logits = res.logits + + # 教师模型前向传播(只在eval & no_grad) + if teacher_model is not None: + with torch.no_grad(): + teacher_logits = teacher_model(X).logits + vocab_size_student = student_logits.size(-1) # N + teacher_logits = teacher_logits[..., :vocab_size_student] + + # ========== 计算损失 ========== + # 1) Ground-Truth CE Loss(可选) + loss_mask_flat = loss_mask.view(-1) + ce_loss = F.cross_entropy( + 
student_logits.view(-1, student_logits.size(-1)), + Y.view(-1), + ignore_index=0, + reduction='none' + ) + ce_loss = torch.sum(ce_loss * loss_mask_flat) / loss_mask_flat.sum() + if lm_config_student.use_moe: + ce_loss += res.aux_loss + + # 2) Distillation Loss(可选) + if teacher_model is not None: + # 只在有效token位置做蒸馏 + distill_loss = distillation_loss_fn( + student_logits.view(-1, student_logits.size(-1))[loss_mask_flat == 1], + teacher_logits.view(-1, teacher_logits.size(-1))[loss_mask_flat == 1], + temperature=temperature + ) + else: + distill_loss = torch.tensor(0.0, device=args.device) + + # 3) 总损失 = alpha * CE + (1-alpha) * Distill + loss = alpha * ce_loss + (1 - alpha) * distill_loss + + scaler.scale(loss).backward() + + if (step + 1) % args.accumulation_steps == 0: + scaler.unscale_(optimizer) + torch.nn.utils.clip_grad_norm_(model.parameters(), args.grad_clip) + scaler.step(optimizer) + scaler.update() + optimizer.zero_grad(set_to_none=True) + + if step % args.log_interval == 0: + spend_time = time.time() - start_time + Logger( + 'Epoch:[{}/{}]({}/{}) loss:{:.4f} lr:{:.12f} epoch_Time:{}min:'.format( + epoch, + args.epochs - 1, + step, + iter_per_epoch, + loss.item(), + optimizer.param_groups[-1]['lr'], + spend_time / (step + 1) * iter_per_epoch // 60 - spend_time // 60 + ) + ) + + if (wandb is not None) and (not ddp or dist.get_rank() == 0): + wandb.log({ + "loss": loss.item(), + "ce_loss": ce_loss.item(), + "distill_loss": distill_loss.item() if teacher_model is not None else 0.0, + "lr": optimizer.param_groups[-1]['lr'], + "last-time": spend_time / (step + 1) * iter_per_epoch // 60 - spend_time // 60 + }) + + if (step + 1) % args.save_interval == 0 and (not ddp or dist.get_rank() == 0): + model.eval() + moe_path = '_moe' if lm_config_student.use_moe else '' + ckp = f'{args.save_dir}/full_dist_{lm_config_student.dim}{moe_path}.pth' + if isinstance(model, torch.nn.parallel.DistributedDataParallel): + state_dict = model.module.state_dict() + else: + state_dict 
= model.state_dict() + torch.save(state_dict, ckp) + model.train() + + +def init_student_model(lm_config): + tokenizer = AutoTokenizer.from_pretrained('./model/minimind_tokenizer') + model = MiniMindLM(lm_config) + moe_path = '_moe' if lm_config.use_moe else '' + ckp = f'./out/full_sft_{lm_config.dim}{moe_path}.pth' + state_dict = torch.load(ckp, map_location=args.device) + model.load_state_dict(state_dict, strict=False) + Logger(f'学生模型(LLM)总参数量:{sum(p.numel() for p in model.parameters() if p.requires_grad) / 1e6:.3f} 百万') + model = model.to(args.device) + + return model, tokenizer + + +def init_teacher_model(lm_config): + model = MiniMindLM(lm_config) + moe_path = '_moe' if lm_config.use_moe else '' + ckp = f'./out/full_sft_{lm_config.dim}{moe_path}.pth' + state_dict = torch.load(ckp, map_location=args.device) + model.load_state_dict(state_dict, strict=False) + Logger(f'教师模型(LLM)总参数量:{sum(p.numel() for p in model.parameters() if p.requires_grad) / 1e6:.3f} 百万') + model = model.to(args.device) + return model + + +def init_distributed_mode(): + if not ddp: return + global ddp_local_rank, DEVICE + + dist.init_process_group(backend="nccl") + ddp_rank = int(os.environ["RANK"]) + ddp_local_rank = int(os.environ["LOCAL_RANK"]) + ddp_world_size = int(os.environ["WORLD_SIZE"]) + DEVICE = f"cuda:{ddp_local_rank}" + torch.cuda.set_device(DEVICE) + + +if __name__ == "__main__": + parser = argparse.ArgumentParser(description="MiniMind Full SFT") + parser.add_argument("--out_dir", type=str, default="out") + parser.add_argument("--epochs", type=int, default=6) + parser.add_argument("--batch_size", type=int, default=32) + parser.add_argument("--learning_rate", type=float, default=5e-6) + parser.add_argument("--device", type=str, default="cuda:0" if torch.cuda.is_available() else "cpu") + parser.add_argument("--dtype", type=str, default="bfloat16") + parser.add_argument("--use_wandb", action="store_true") + parser.add_argument("--wandb_project", type=str, 
default="MiniMind-Full-SFT") + parser.add_argument("--num_workers", type=int, default=1) + parser.add_argument("--ddp", action="store_true") + parser.add_argument("--accumulation_steps", type=int, default=1) + parser.add_argument("--grad_clip", type=float, default=1.0) + parser.add_argument("--warmup_iters", type=int, default=0) + parser.add_argument("--log_interval", type=int, default=100) + parser.add_argument("--save_interval", type=int, default=100) + parser.add_argument('--local_rank', type=int, default=-1) + parser.add_argument("--data_path", type=str, default="./dataset/sft_data.jsonl") + + args = parser.parse_args() + # 定义学生模型和教师模型 + lm_config_student = LMConfig(dim=512, n_layers=8, max_seq_len=512) + lm_config_teacher = LMConfig(dim=768, n_layers=16, max_seq_len=512) + max_seq_len = lm_config_student.max_seq_len + args.save_dir = os.path.join(args.out_dir) + os.makedirs(args.save_dir, exist_ok=True) + os.makedirs(args.out_dir, exist_ok=True) + tokens_per_iter = args.batch_size * max_seq_len + device_type = "cuda" if "cuda" in args.device else "cpu" + + args.wandb_run_name = f"MiniMind-Dist-SFT-Epoch-{args.epochs}-BatchSize-{args.batch_size}-LearningRate-{args.learning_rate}" + + ctx = nullcontext() if device_type == "cpu" else torch.cuda.amp.autocast() + ddp = int(os.environ.get("RANK", -1)) != -1 # is this a ddp run? 
+ ddp_local_rank, DEVICE = 0, "cuda:0" + base_seed = 1337 + torch.manual_seed(base_seed) + torch.cuda.manual_seed(base_seed) + + if ddp: + init_distributed_mode() + args.device = torch.device(DEVICE) + rank = dist.get_rank() + torch.manual_seed(base_seed + rank) + # 同时设置 CUDA 的随机种子 + torch.cuda.manual_seed(base_seed + rank) + + if args.use_wandb and (not ddp or ddp_local_rank == 0): + import wandb + + wandb.init(project=args.wandb_project, name=args.wandb_run_name) + else: + wandb = None + + # 初始化学生模型和教师模型 + model, tokenizer = init_student_model(lm_config_student) + teacher_model = init_teacher_model(lm_config_teacher) + + train_ds = SFTDataset(args.data_path, tokenizer, max_length=max_seq_len) + train_sampler = DistributedSampler(train_ds) if ddp else None + train_loader = DataLoader( + train_ds, + batch_size=args.batch_size, + pin_memory=True, + drop_last=False, + shuffle=False, + num_workers=args.num_workers, + sampler=train_sampler + ) + + scaler = torch.cuda.amp.GradScaler(enabled=(args.dtype in ['float16', 'bfloat16'])) + optimizer = optim.AdamW(model.parameters(), lr=args.learning_rate) + + if ddp: + model._ddp_params_and_buffers_to_ignore = {"pos_cis"} + model = DistributedDataParallel(model, device_ids=[ddp_local_rank]) + + iter_per_epoch = len(train_loader) + for epoch in range(args.epochs): + train_epoch(epoch, wandb) diff --git a/train_dpo.py b/train_dpo.py new file mode 100644 index 0000000..e79dfb5 --- /dev/null +++ b/train_dpo.py @@ -0,0 +1,247 @@ +import os +import platform +import argparse +import time +import math +import warnings + +import pandas as pd +import torch +import torch.nn.functional as F +import torch.distributed as dist +from contextlib import nullcontext + +from torch import optim, nn +from torch.nn.parallel import DistributedDataParallel +from torch.utils.data import DataLoader, DistributedSampler +from transformers import AutoTokenizer, AutoModelForCausalLM +from model.model import MiniMindLM +from model.LMConfig import LMConfig 
+from model.dataset import DPODataset + +warnings.filterwarnings('ignore') + + +def Logger(content): + if not ddp or dist.get_rank() == 0: + print(content) + + +def get_lr(current_step, total_steps, lr): + return lr / 10 + 0.5 * lr * (1 + math.cos(math.pi * current_step / total_steps)) + + +def logits_to_probs(logits, labels): + # logits shape: (batch_size, seq_len, vocab_size) + # labels shape: (batch_size, seq_len) + # probs shape: (batch_size, seq_len) + log_probs = F.log_softmax(logits, dim=2) + probs = torch.gather(log_probs, dim=2, index=labels.unsqueeze(2)).squeeze(-1) + return probs + + +def dpo_loss(ref_probs, probs, mask, beta): + # ref_probs 和 probs 都是 shape: (batch_size, seq_len) + # https://github.com/jingyaogong/minimind/issues/298 + seq_lengths = mask.sum(dim=1, keepdim=True) # (batch_size, 1) + ref_probs = (ref_probs * mask).sum(dim=1) / seq_lengths.squeeze() + probs = (probs * mask).sum(dim=1) / seq_lengths.squeeze() + + # 将 chosen 和 rejected 数据分开 + batch_size = ref_probs.shape[0] + chosen_ref_probs = ref_probs[:batch_size // 2] + reject_ref_probs = ref_probs[batch_size // 2:] + chosen_probs = probs[:batch_size // 2] + reject_probs = probs[batch_size // 2:] + + pi_logratios = chosen_probs - reject_probs + ref_logratios = chosen_ref_probs - reject_ref_probs + logits = pi_logratios - ref_logratios + loss = -F.logsigmoid(beta * logits) + return loss.mean() + + +def train_epoch(epoch, wandb): + start_time = time.time() + for step, batch in enumerate(train_loader): + x_chosen = batch['x_chosen'].to(args.device) + x_rejected = batch['x_rejected'].to(args.device) + y_chosen = batch['y_chosen'].to(args.device) + y_rejected = batch['y_rejected'].to(args.device) + mask_chosen = batch['mask_chosen'].to(args.device) + mask_rejected = batch['mask_rejected'].to(args.device) + x = torch.cat([x_chosen, x_rejected], dim=0) + y = torch.cat([y_chosen, y_rejected], dim=0) + mask = torch.cat([mask_chosen, mask_rejected], dim=0) + + lr = get_lr(epoch * iter_per_epoch + 
step, args.epochs * iter_per_epoch, args.learning_rate) + for param_group in optimizer.param_groups: + param_group['lr'] = lr + + with ctx: + with torch.no_grad(): + ref_outputs = ref_model(x) + ref_logits = ref_outputs.logits + ref_probs = logits_to_probs(ref_logits, y) + ref_probs = ref_probs * mask + outputs = model(x) + logits = outputs.logits + probs = logits_to_probs(logits, y) + probs = probs * mask + loss = dpo_loss(ref_probs, probs, mask, beta=0.1) + loss = loss / args.accumulation_steps + + scaler.scale(loss).backward() + + if (step + 1) % args.accumulation_steps == 0: + scaler.unscale_(optimizer) + torch.nn.utils.clip_grad_norm_(model.parameters(), args.grad_clip) + scaler.step(optimizer) + scaler.update() + optimizer.zero_grad(set_to_none=True) + + if step % args.log_interval == 0: + spend_time = time.time() - start_time + Logger( + 'Epoch:[{}/{}]({}/{}) loss:{:.3f} lr:{:.12f} epoch_Time:{}min:'.format( + epoch + 1, + args.epochs, + step, + iter_per_epoch, + loss.item(), + optimizer.param_groups[-1]['lr'], + spend_time / (step + 1) * iter_per_epoch // 60 - spend_time // 60)) + + if (wandb is not None) and (not ddp or dist.get_rank() == 0): + wandb.log({"loss": loss, + "lr": optimizer.param_groups[-1]['lr'], + "epoch_Time": spend_time / (step + 1) * iter_per_epoch // 60 - spend_time // 60}) + + if (step + 1) % args.save_interval == 0 and (not ddp or dist.get_rank() == 0): + model.eval() + moe_path = '_moe' if lm_config.use_moe else '' + ckp = f'{args.save_dir}/rlhf_{lm_config.dim}{moe_path}.pth' + + if isinstance(model, torch.nn.parallel.DistributedDataParallel): + state_dict = model.module.state_dict() + else: + state_dict = model.state_dict() + + torch.save(state_dict, ckp) + model.train() + + +def init_model(lm_config): + tokenizer = AutoTokenizer.from_pretrained('./model/minimind_tokenizer') + model = MiniMindLM(lm_config) + moe_path = '_moe' if lm_config.use_moe else '' + ckp = f'./out/full_sft_{lm_config.dim}{moe_path}.pth' + state_dict = 
torch.load(ckp, map_location=args.device) + model.load_state_dict(state_dict, strict=False) + # 初始化参考模型 + ref_model = MiniMindLM(lm_config) + ref_model.load_state_dict(state_dict, strict=False) + ref_model.eval() + ref_model.requires_grad_(False) + + Logger(f'LLM总参数量:{sum(p.numel() for p in model.parameters() if p.requires_grad) / 1e6:.3f} 百万') + model = model.to(args.device) + ref_model = ref_model.to(args.device) + + return model, ref_model, tokenizer + + +def init_distributed_mode(): + if not ddp: return + global ddp_local_rank, DEVICE + + dist.init_process_group(backend="nccl") + ddp_rank = int(os.environ["RANK"]) + ddp_local_rank = int(os.environ["LOCAL_RANK"]) + ddp_world_size = int(os.environ["WORLD_SIZE"]) + DEVICE = f"cuda:{ddp_local_rank}" + torch.cuda.set_device(DEVICE) + + +if __name__ == "__main__": + parser = argparse.ArgumentParser(description="MiniMind RLHF") + parser.add_argument("--out_dir", type=str, default="out") + parser.add_argument("--epochs", type=int, default=2) + parser.add_argument("--batch_size", type=int, default=8) + # sft阶段学习率为 「5e-6」->「5e-7」长度512,建议离线正负样本「概率」偏好对齐阶段lr <=「1e-8」长度3000,否则很容易遗忘训坏 + parser.add_argument("--learning_rate", type=float, default=1e-8) + parser.add_argument("--device", type=str, default="cuda:0" if torch.cuda.is_available() else "cpu") + parser.add_argument("--dtype", type=str, default="bfloat16") + parser.add_argument("--use_wandb", action="store_true") + parser.add_argument("--wandb_project", type=str, default="MiniMind-RLHF-SFT") + parser.add_argument("--num_workers", type=int, default=1) + parser.add_argument("--ddp", action="store_true") + parser.add_argument("--accumulation_steps", type=int, default=1) + parser.add_argument("--grad_clip", type=float, default=1.0) + parser.add_argument("--warmup_iters", type=int, default=0) + parser.add_argument("--log_interval", type=int, default=100) + parser.add_argument("--save_interval", type=int, default=100) + parser.add_argument('--local_rank', type=int, 
default=-1) + parser.add_argument('--dim', default=512, type=int) + parser.add_argument('--n_layers', default=8, type=int) + parser.add_argument('--max_seq_len', default=1024, type=int) + parser.add_argument('--use_moe', default=False, type=bool) + parser.add_argument("--data_path", type=str, default="./dataset/dpo.jsonl") + + args = parser.parse_args() + + lm_config = LMConfig(dim=args.dim, n_layers=args.n_layers, max_seq_len=args.max_seq_len, use_moe=args.use_moe) + args.save_dir = os.path.join(args.out_dir) + os.makedirs(args.save_dir, exist_ok=True) + os.makedirs(args.out_dir, exist_ok=True) + tokens_per_iter = args.batch_size * lm_config.max_seq_len + device_type = "cuda" if "cuda" in args.device else "cpu" + + args.wandb_run_name = f"MiniMind-Full-DPO-Epoch-{args.epochs}-BatchSize-{args.batch_size}-LearningRate-{args.learning_rate}" + + ctx = nullcontext() if device_type == "cpu" else torch.cuda.amp.autocast() + ddp = int(os.environ.get("RANK", -1)) != -1 # is this a ddp run? + ddp_local_rank, DEVICE = 0, "cuda:0" + base_seed = 1337 + torch.manual_seed(base_seed) + torch.cuda.manual_seed(base_seed) + + if ddp: + init_distributed_mode() + args.device = torch.device(DEVICE) + rank = dist.get_rank() + torch.manual_seed(base_seed + rank) + # 同时设置 CUDA 的随机种子 + torch.cuda.manual_seed(base_seed + rank) + + if args.use_wandb and (not ddp or ddp_local_rank == 0): + import wandb + + wandb.init(project=args.wandb_project, name=args.wandb_run_name) + else: + wandb = None + + model, ref_model, tokenizer = init_model(lm_config) + + train_ds = DPODataset(args.data_path, tokenizer, max_length=lm_config.max_seq_len) + train_sampler = DistributedSampler(train_ds) if ddp else None + train_loader = DataLoader( + train_ds, + batch_size=args.batch_size, + pin_memory=True, + drop_last=False, + shuffle=False, + num_workers=args.num_workers, + sampler=train_sampler + ) + + scaler = torch.cuda.amp.GradScaler(enabled=(args.dtype in ['float16', 'bfloat16'])) + optimizer = 
class CBOWModel(nn.Module):
    """Word2Vec CBOW: predict the center token from the mean of its context embeddings."""

    def __init__(self, config: LMConfig):
        super().__init__()
        self.vocab_size = config.vocab_size
        self.embedding_dim = config.dim

        # Input-side (context) embedding table — the artifact this script trains.
        self.embeddings = nn.Embedding(config.vocab_size, config.dim)

        # Untied output projection back to vocabulary logits.
        self.output_weights = nn.Linear(config.dim, config.vocab_size, bias=False)

        self.init_weights()

    def init_weights(self):
        """Xavier-uniform init on both weight matrices for stable early training."""
        for weight in (self.embeddings.weight, self.output_weights.weight):
            nn.init.xavier_uniform_(weight)

    def forward(self, context_words):
        """Map context token ids (batch, context_len) to vocab logits (batch, vocab).

        context_len may vary between batches; the bag-of-words mean makes the
        prediction order-invariant.
        """
        context_vectors = self.embeddings(context_words)  # (B, C, D)
        bag = context_vectors.mean(dim=1)                 # (B, D)
        return self.output_weights(bag)                   # (B, V)
valid_input_ids[center_pos].unsqueeze(0) + + # 上下文词(中心词前后的词) + context = torch.cat([ + valid_input_ids[center_pos - self.window_size:center_pos], + valid_input_ids[center_pos + 1:center_pos + self.window_size + 1] + ]) + + return context, target + + +def Logger(content): + # 如果没有使用ddp或者ddp的主设备,那么就打印 + if not ddp or dist.get_rank() == 0: + print(content) + + +def get_lr(current_step, total_steps, lr): + # 更新学习率 + # \text{get\_lr}(c, t, l) = \frac{l}{10} + 0.5 \cdot l \cdot \left(1 + \cos\left(\frac{\pi \cdot c}{t}\right)\right) + return lr / 10 + 0.5 * lr * (1 + math.cos(math.pi * current_step / total_steps)) + + +def train_epoch(epoch, wandb): + loss_fct = nn.CrossEntropyLoss() + start_time = time.time() + total_loss = 0 + total_samples = 0 + + for step, (context, target) in enumerate(train_loader): + try: + # 将数据加载到设备上 + context = context.to(args.device) + target = target.to(args.device) + + # 更新学习率 + lr = get_lr(epoch * iter_per_epoch + step, args.epochs * iter_per_epoch, args.learning_rate) + for param_group in optimizer.param_groups: + param_group['lr'] = lr + + with ctx: + # Forward pass + logits = model(context) # [batch_size, vocab_size] + # target是[batch_size, 1],需要squeeze成[batch_size]来匹配CrossEntropyLoss的预期 + loss = loss_fct(logits, target.squeeze()) + loss = loss / args.accumulation_steps + + # Print data types for debugging + if step == 0 and (not ddp or dist.get_rank() == 0): + Logger("---- Data Type Check ----") + Logger(f"context.dtype: {context.dtype}") + Logger(f"context.shape: {context.shape}") + Logger(f"target.dtype: {target.dtype}") + Logger(f"target.shape: {target.shape}") + if hasattr(model, 'module'): # DDP case + Logger(f"Model parameter dtype: {next(model.module.parameters()).dtype}") + else: # Non-DDP case + Logger(f"Model parameter dtype: {next(model.parameters()).dtype}") + Logger(f"logits.dtype: {logits.dtype}") + Logger(f"logits.shape: {logits.shape}") + Logger(f"loss.dtype: {loss.dtype}") + Logger("-------------------------") + + 
scaler.scale(loss).backward() + + if (step + 1) % args.accumulation_steps == 0: + scaler.unscale_(optimizer) + torch.nn.utils.clip_grad_norm_(model.parameters(), args.grad_clip) + + scaler.step(optimizer) + scaler.update() + + optimizer.zero_grad(set_to_none=True) + + total_loss += loss.item() * args.accumulation_steps + total_samples += 1 + + # 打印日志 + if step % args.log_interval == 0: + spend_time = time.time() - start_time + avg_loss = total_loss / total_samples if total_samples > 0 else 0 + Logger( + 'Epoch:[{}/{}]({}/{}) loss:{:.3f} lr:{:.12f} epoch_Time:{}min:'.format( + epoch + 1, + args.epochs, + step, + iter_per_epoch, + avg_loss, + optimizer.param_groups[-1]['lr'], + spend_time / (step + 1) * iter_per_epoch // 60 - spend_time // 60)) + + if (wandb is not None) and (not ddp or dist.get_rank() == 0): + wandb.log({"loss": avg_loss, + "lr": optimizer.param_groups[-1]['lr'], + "epoch_Time": spend_time / (step + 1) * iter_per_epoch // 60 - spend_time // 60}) + + except Exception as e: + print(f"Error occurred: {str(e)}") + import traceback + traceback.print_exc() + # Modified checkpoint path for error + save_path = f'{args.save_dir}/word2vec_embedding_dim{lm_config.dim}_vocab{lm_config.vocab_size}_ERROR.pth' + if os.path.exists(save_path): + os.remove(save_path) + + if isinstance(model, torch.nn.parallel.DistributedDataParallel): + state_dict = model.module.embeddings.state_dict() + else: + state_dict = model.embeddings.state_dict() + torch.save(state_dict, save_path) + + for name, param in model.named_parameters(): + if param.grad is not None and torch.isnan(param.grad).any(): + print(f"NaN gradient in parameter: {name}") + + for name, param in model.named_parameters(): + if param.grad is not None and torch.isnan(param.grad).any(): + print(f"Parameter {name} values: {param.data}") + print(f"Parameter {name} gradients: {param.grad}") + + raise ValueError("NaN gradient detected") + + # Save model once at the end of each epoch + if not ddp or dist.get_rank() == 0: 
+ model.eval() + ckp = f'{args.save_dir}/word2vec_embedding_dim{lm_config.dim}_vocab{lm_config.vocab_size}_epoch{epoch+1}.pth' + + if isinstance(model, torch.nn.parallel.DistributedDataParallel): + embedding_state_dict = model.module.embeddings.state_dict() + else: + embedding_state_dict = model.embeddings.state_dict() + + torch.save(embedding_state_dict, ckp) + Logger(f"Saved word2vec embedding for epoch {epoch+1} to {ckp}") + model.train() + + +def init_model(lm_config_params: LMConfig): + # 加载tokenizer + tokenizer = AutoTokenizer.from_pretrained('./model/minimind_tokenizer') + # Update vocab_size in lm_config if tokenizer has a different one + if tokenizer.vocab_size != lm_config_params.vocab_size: + Logger(f"Updating lm_config.vocab_size from {lm_config_params.vocab_size} to {tokenizer.vocab_size} based on tokenizer.") + lm_config_params.vocab_size = tokenizer.vocab_size + + # 加载word2vec CBOW模型 + model = CBOWModel(lm_config_params).to(args.device) + # 打印模型参数 + Logger(f'CBOW Model total parameters: {sum(p.numel() for p in model.parameters() if p.requires_grad) / 1e6:.3f} Million') + return model, tokenizer + + +def init_distributed_mode(): + if not ddp: return #如果没有启用分布式数据并行(DDP),直接返回,不执行任何操作。 + global ddp_local_rank, DEVICE #声明这两个变量为全局变量,以便在函数外部也能访问它们。 + + dist.init_process_group(backend="nccl") #初始化分布式进程组,使用NCCL后端(NVIDIA Collective Communications Library),这是NVIDIA GPU之间通信的优化库。 + ddp_rank = int(os.environ["RANK"]) #从环境变量获取当前进程的全局编号。 + ddp_local_rank = int(os.environ["LOCAL_RANK"]) #从环境变量获取当前进程的本地编号。 + ddp_world_size = int(os.environ["WORLD_SIZE"]) #从环境变量获取当前进程组中的进程总数。 + DEVICE = f"cuda:{ddp_local_rank}" #根据本地编号选择GPU设备。 + torch.cuda.set_device(DEVICE) #设置当前进程的GPU设备。 + + +# torchrun --nproc_per_node 2 train_embedding.py +if __name__ == "__main__": + parser = argparse.ArgumentParser(description="MiniMind Word2Vec Embedding Training") + parser.add_argument("--out_dir", type=str, default="out_word2vec") + parser.add_argument("--epochs", type=int, default=3) + 
    parser.add_argument("--batch_size", type=int, default=256)
    parser.add_argument("--learning_rate", type=float, default=5e-4)
    parser.add_argument("--device", type=str, default="cuda:0" if torch.cuda.is_available() else "cpu")
    parser.add_argument("--dtype", type=str, default="bfloat16")
    parser.add_argument("--use_wandb", default=False, action="store_true")
    parser.add_argument("--wandb_project", type=str, default="MiniMind-Word2Vec-Training")
    parser.add_argument("--num_workers", type=int, default=32)
    parser.add_argument("--ddp", action="store_true")
    parser.add_argument("--accumulation_steps", type=int, default=8)
    parser.add_argument("--grad_clip", type=float, default=1.0)
    parser.add_argument("--log_interval", type=int, default=100)
    parser.add_argument("--save_interval", type=int, default=100)
    parser.add_argument('--local_rank', type=int, default=-1)
    parser.add_argument('--dim', default=768, type=int)
    parser.add_argument('--max_seq_len', default=512, type=int)
    parser.add_argument("--data_path", type=str, default="./dataset/pretrain_hq.jsonl")
    parser.add_argument('--vocab_size', default=6400, type=int)
    parser.add_argument('--window_size', default=5, type=int)  # CBOW context window (tokens on each side)


    args = parser.parse_args()

    # Minimal LMConfig: only embedding-relevant fields matter for CBOW training;
    # transformer depth/heads are set to 1 as placeholders.
    lm_config = LMConfig(
        dim=args.dim,
        vocab_size=args.vocab_size,  # will be overwritten from the tokenizer in init_model
        max_seq_len=args.max_seq_len,
        n_layers=1,  # minimal
        n_heads=1,  # minimal
        n_kv_heads=1  # minimal
    )
    args.save_dir = os.path.join(args.out_dir)  # NOTE(review): save_dir == out_dir; the two makedirs below are redundant
    os.makedirs(args.save_dir, exist_ok=True)
    os.makedirs(args.out_dir, exist_ok=True)
    tokens_per_iter = args.batch_size * lm_config.max_seq_len
    print(f"tokens_per_iter: {tokens_per_iter}")
    device_type = "cuda" if "cuda" in args.device else "cpu"

    # Map the --dtype string onto the torch dtype used for autocast.
    pt_dtype = {'float32': torch.float32, 'bfloat16': torch.bfloat16, 'float16': torch.float16}[args.dtype]

    args.wandb_run_name = f"MiniMind-Word2Vec-Dim-{args.dim}-Vocab-{lm_config.vocab_size}-Window-{args.window_size}"

    # Mixed-precision context on GPU; plain context on CPU.
    ctx = nullcontext() if device_type == "cpu" else torch.cuda.amp.autocast(dtype=pt_dtype)

    ddp = int(os.environ.get("RANK", -1)) != -1  # is this a ddp run (torchrun sets RANK)?
    ddp_local_rank, DEVICE = 0, "cuda:0"  # defaults; overwritten by init_distributed_mode under DDP

    base_seed = 1337
    torch.manual_seed(base_seed)
    torch.cuda.manual_seed(base_seed)

    if ddp:
        init_distributed_mode()  # sets DEVICE and ddp_local_rank globals
        args.device = torch.device(DEVICE)  # keep args.device in sync with the assigned GPU
        rank = dist.get_rank()
        # Per-rank seed so data shuffling/augmentation differs across workers.
        torch.manual_seed(base_seed + rank)
        torch.cuda.manual_seed_all(base_seed + rank)  # seed_all covers every visible GPU

    # wandb only on rank 0 (or in single-process runs).
    if args.use_wandb and (not ddp or dist.get_rank() == 0):
        import wandb

        wandb.init(project=args.wandb_project, name=args.wandb_run_name, config=args)
    else:
        wandb = None

    model, tokenizer = init_model(lm_config)  # may mutate lm_config.vocab_size

    # Re-check vocab size after tokenizer load so the run name / wandb config stay consistent.
    if lm_config.vocab_size != tokenizer.vocab_size:
        lm_config.vocab_size = tokenizer.vocab_size
        args.wandb_run_name = f"MiniMind-Word2Vec-Dim-{args.dim}-Vocab-{lm_config.vocab_size}-Window-{args.window_size}"
        if wandb is not None and (not ddp or dist.get_rank() == 0):
            wandb.config.update({'vocab_size': lm_config.vocab_size, 'wandb_run_name': args.wandb_run_name}, allow_val_change=True)

    # Collate function: pads variable-length CBOW context windows to the batch max.
    def collate_cbow_batch(batch):
        # Each item is (context_token_ids, target_token_id).
        contexts, targets = zip(*batch)

        # Longest context in this batch determines the padded width.
        max_len = max([ctx.size(0) for ctx in contexts])

        # Zero-pad (token id 0) up to max_len. NOTE(review): assumes id 0 is a
        # safe pad token for CBOWModel — confirm against the model/tokenizer.
        padded_contexts = torch.zeros(len(contexts), max_len, dtype=torch.long)

        for i, ctx in enumerate(contexts):
            ctx_len = ctx.size(0)
            padded_contexts[i, :ctx_len] = ctx

        # Targets are scalar ids; stack into a 1-D tensor.
        stacked_targets = torch.stack(targets)

        return padded_contexts, stacked_targets

    # Create Word2Vec CBOW dataset and loader.
    train_ds = CBOWDataset(args.data_path, tokenizer, max_length=lm_config.max_seq_len, window_size=args.window_size)
    train_sampler = DistributedSampler(train_ds, shuffle=True, seed=base_seed) if ddp else None
    train_loader = DataLoader(
        train_ds,
        batch_size=args.batch_size,
        pin_memory=True,
        drop_last=True,
        shuffle=(train_sampler is None),  # sampler and shuffle are mutually exclusive
        num_workers=args.num_workers,
        sampler=train_sampler,
        collate_fn=collate_cbow_batch
    )

    # Grad scaler for fp16/bf16 training; no-op for float32.
    scaler = torch.cuda.amp.GradScaler(enabled=(args.dtype in ['float16', 'bfloat16']))
    optimizer = optim.AdamW(model.parameters(), lr=args.learning_rate)

    if ddp:
        model = DistributedDataParallel(model, device_ids=[ddp_local_rank])

    iter_per_epoch = len(train_loader)
    Logger(f"Starting Word2Vec CBOW training for {args.epochs} epochs with {iter_per_epoch} iterations per epoch.")
    for epoch in range(args.epochs):
        if ddp:
            train_sampler.set_epoch(epoch)  # reshuffle shards per epoch under DDP
        train_epoch(epoch, wandb)

    if wandb is not None and (not ddp or dist.get_rank() == 0):
        wandb.finish()

    Logger("Word2Vec embedding training finished.")

# ===== new file in this diff: train_full_sft.py =====
import os
# Must be set before wandb initializes: forces offline logging (or use "dryrun").
os.environ["WANDB_MODE"] = "offline"
import platform
import argparse
import time
import math
import warnings

import pandas as pd
import torch
import torch.nn.functional as F
import torch.distributed as dist
from contextlib import nullcontext

from torch import optim, nn
from torch.nn.parallel import DistributedDataParallel
from torch.utils.data import DataLoader, DistributedSampler
from transformers import AutoTokenizer, AutoModelForCausalLM
from model.model import MiniMindLM
from model.LMConfig import LMConfig
from model.dataset import SFTDataset


warnings.filterwarnings('ignore')

# Logging helper: prints training info (rank 0 only under DDP).
def Logger(content):
    """Print ``content`` only on the main process (rank 0) under DDP."""
    if not ddp or dist.get_rank() == 0:
        print(content)

# Cosine-decay learning-rate schedule with a floor of lr/10.
def get_lr(current_step, total_steps, lr):
    return lr / 10 + 0.5 * lr * (1 + math.cos(math.pi * current_step / total_steps))

# Train the SFT model for one epoch over train_loader (uses module-level
# model/optimizer/scaler/ctx/args globals).
def train_epoch(epoch, wandb):
    loss_fct = nn.CrossEntropyLoss(reduction='none')  # per-token loss; masked/averaged manually below
    start_time = time.time()
    for step, (X, Y, loss_mask) in enumerate(train_loader):
        # Move batch to the target device.
        X = X.to(args.device)
        Y = Y.to(args.device)
        loss_mask = loss_mask.to(args.device)
        # Cosine schedule over the whole run, applied manually to every param group.
        lr = get_lr(epoch * iter_per_epoch + step, args.epochs * iter_per_epoch, args.learning_rate)
        for param_group in optimizer.param_groups:
            param_group['lr'] = lr

        with ctx:  # autocast (GPU) or nullcontext (CPU)
            res = model(X)  # forward pass
            loss = loss_fct(
                res.logits.view(-1, res.logits.size(-1)),
                Y.view(-1)
            ).view(Y.size())  # per-token CE, reshaped back to (batch, seq)

            # Mask out non-target tokens (padding / prompt) and average over real tokens.
            loss = (loss * loss_mask).sum() / loss_mask.sum()
            loss += res.aux_loss  # MoE auxiliary load-balancing loss, if any
            loss = loss / args.accumulation_steps  # scale for gradient accumulation

        # GradScaler: scales the loss to avoid fp16 underflow during backward.
        scaler.scale(loss).backward()

        if (step + 1) % args.accumulation_steps == 0:
            scaler.unscale_(optimizer)  # unscale grads so clipping operates on true magnitudes
            torch.nn.utils.clip_grad_norm_(model.parameters(), args.grad_clip)  # prevent exploding gradients

            scaler.step(optimizer)  # optimizer step, skipped by the scaler on overflow
            scaler.update()  # adjust the scale factor based on overflow status

            optimizer.zero_grad(set_to_none=True)  # free grad memory between accumulation windows

        # Periodic console/wandb logging.
        if step % args.log_interval == 0:
            spend_time = time.time() - start_time
            # NOTE(review): loss.item() here is the accumulation-scaled loss; train_pretrain.py
            # multiplies by args.accumulation_steps before logging — inconsistent between scripts.
            Logger(
                'Epoch:[{}/{}]({}/{}) loss:{:.3f} lr:{:.12f} epoch_Time:{}min:'.format(
                    epoch + 1,
                    args.epochs,
                    step,
                    iter_per_epoch,
                    loss.item(),
                    optimizer.param_groups[-1]['lr'],
                    spend_time / (step + 1) * iter_per_epoch // 60 - spend_time // 60))

            if (wandb is not None) and (not ddp or dist.get_rank() == 0):
                # NOTE(review): logs the loss tensor rather than loss.item().
                wandb.log({"loss": loss,
                           "lr": optimizer.param_groups[-1]['lr'],
                           "epoch_Time": spend_time / (step + 1) * iter_per_epoch // 60 - spend_time // 60})

        # Periodic checkpoint (rank 0 only); overwrites the same file each time.
        if (step + 1) % args.save_interval == 0 and (not ddp or dist.get_rank() == 0):
            model.eval()
            moe_path = '_moe' if lm_config.use_moe else ''
            ckp = f'{args.save_dir}/full_sft_{lm_config.dim}{moe_path}.pth'

            if isinstance(model, torch.nn.parallel.DistributedDataParallel):
                state_dict = model.module.state_dict()
            else:
                state_dict = model.state_dict()

            torch.save(state_dict, ckp)
            model.train()

# Build the SFT model from the pretraining checkpoint, plus its tokenizer.
def init_model(lm_config):
    tokenizer = AutoTokenizer.from_pretrained('./model/minimind_tokenizer')
    model = MiniMindLM(lm_config)
    moe_path = '_moe' if lm_config.use_moe else ''
    # SFT starts from the pretrain checkpoint produced by train_pretrain.py.
    ckp = f'./out/pretrain_{lm_config.dim}{moe_path}.pth'
    state_dict = torch.load(ckp, map_location=args.device)
    model.load_state_dict(state_dict, strict=False)  # strict=False tolerates missing/extra keys
    Logger(f'LLM总参数量:{sum(p.numel() for p in model.parameters() if p.requires_grad) / 1e6:.3f} 百万')
    model = model.to(args.device)
    return model, tokenizer

# Initialize torch.distributed (NCCL) from torchrun environment variables;
# updates the ddp_local_rank / DEVICE globals.
def init_distributed_mode():
    if not ddp: return
    global ddp_local_rank, DEVICE

    dist.init_process_group(backend="nccl")
    ddp_rank = int(os.environ["RANK"])
    ddp_local_rank = int(os.environ["LOCAL_RANK"])
    ddp_world_size = int(os.environ["WORLD_SIZE"])
    DEVICE = f"cuda:{ddp_local_rank}"
    torch.cuda.set_device(DEVICE)


if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="MiniMind Full SFT")
    parser.add_argument("--out_dir", type=str, default="out")
    parser.add_argument("--epochs", type=int, default=3)
    parser.add_argument("--batch_size", type=int, default=32)
    parser.add_argument("--learning_rate", type=float, default=5e-5)
    parser.add_argument("--device", type=str, default="cuda:0" if torch.cuda.is_available() else "cpu")
    parser.add_argument("--dtype", type=str, default="bfloat16")
    # NOTE(review): default=True with action="store_true" means this flag can never
    # be turned off from the CLI (passing it only re-sets True).
    parser.add_argument("--use_wandb", default=True, action="store_true")
    parser.add_argument("--wandb_project", type=str, default="MiniMind-Full-SFT")
    parser.add_argument("--num_workers", type=int, default=1)
    parser.add_argument("--ddp", action="store_true")
    parser.add_argument("--accumulation_steps", type=int, default=1)
    parser.add_argument("--grad_clip", type=float, default=1.0)
    parser.add_argument("--warmup_iters", type=int, default=0)
    parser.add_argument("--log_interval", type=int, default=100)
    parser.add_argument("--save_interval", type=int, default=100)
    parser.add_argument('--local_rank', type=int, default=-1)
    parser.add_argument('--dim', default=1024, type=int)  # model hidden dimension
    parser.add_argument('--n_layers', default=24, type=int)  # number of transformer layers
    parser.add_argument('--max_seq_len', default=1024, type=int)  # maximum input sequence length
    # NOTE(review): type=bool is an argparse pitfall — bool("False") is True;
    # action="store_true" would be the conventional fix.
    parser.add_argument('--use_moe', default=False, type=bool)
    parser.add_argument("--data_path", type=str, default="./dataset/sft_1024.jsonl")

    args = parser.parse_args()

    lm_config = LMConfig(dim=args.dim, n_layers=args.n_layers, max_seq_len=args.max_seq_len, use_moe=args.use_moe)
    args.save_dir = os.path.join(args.out_dir)  # NOTE(review): save_dir == out_dir; duplicate makedirs below
    os.makedirs(args.save_dir, exist_ok=True)
    os.makedirs(args.out_dir, exist_ok=True)
    tokens_per_iter = args.batch_size * lm_config.max_seq_len
    device_type = "cuda" if "cuda" in args.device else "cpu"

    args.wandb_run_name = f"MiniMind-Full-SFT-Epoch-{args.epochs}-BatchSize-{args.batch_size}-LearningRate-{args.learning_rate}"

    # Autocast on GPU (default dtype), plain context on CPU.
    ctx = nullcontext() if device_type == "cpu" else torch.cuda.amp.autocast()
    ddp = int(os.environ.get("RANK", -1)) != -1  # is this a ddp run?
    ddp_local_rank, DEVICE = 0, "cuda:0"  # defaults; overwritten by init_distributed_mode
    base_seed = 1337
    torch.manual_seed(base_seed)
    torch.cuda.manual_seed(base_seed)

    # Set up distributed mode when launched via torchrun.
    if ddp:
        init_distributed_mode()
        args.device = torch.device(DEVICE)
        rank = dist.get_rank()
        torch.manual_seed(base_seed + rank)
        # Also seed CUDA per rank.
        torch.cuda.manual_seed(base_seed + rank)

    # Initialize wandb on the main process only.
    if args.use_wandb and (not ddp or ddp_local_rank == 0):
        import wandb

        wandb.init(project=args.wandb_project, name=args.wandb_run_name)
    else:
        wandb = None

    # Build model (loads the pretrain checkpoint) and tokenizer.
    model, tokenizer = init_model(lm_config)

    # SFT dataset and loader. NOTE(review): shuffle=False and no sampler shuffle —
    # confirm whether deterministic ordering is intended for SFT.
    train_ds = SFTDataset(args.data_path, tokenizer, max_length=lm_config.max_seq_len)
    train_sampler = DistributedSampler(train_ds) if ddp else None
    train_loader = DataLoader(
        train_ds,
        batch_size=args.batch_size,
        pin_memory=True,
        drop_last=False,
        shuffle=False,
        num_workers=args.num_workers,
        sampler=train_sampler
    )

    # GradScaler for fp16/bf16 mixed-precision training: prevents gradient underflow.
    scaler = torch.cuda.amp.GradScaler(enabled=(args.dtype in ['float16', 'bfloat16']))
    # AdamW optimizer over all model parameters (Adam variant with decoupled weight decay).
    optimizer = optim.AdamW(model.parameters(), lr=args.learning_rate)

    if ddp:
        # pos_cis are rotary-position buffers excluded from DDP broadcast/sync.
        model._ddp_params_and_buffers_to_ignore = {"pos_cis"}
        model = DistributedDataParallel(model, device_ids=[ddp_local_rank])

    iter_per_epoch = len(train_loader)
    for epoch in range(args.epochs):
        train_epoch(epoch, wandb)

# ===== new file in this diff: train_lora.py =====
import os
import platform
import argparse
import random
import time
import math
import warnings
import torch.distributed as dist
from contextlib import nullcontext
from torch.utils.data import DataLoader, DistributedSampler
from transformers import AutoTokenizer, AutoModelForCausalLM
from model.model import MiniMindLM
from model.LMConfig import LMConfig
from model.dataset import SFTDataset
# NOTE(review): this script uses torch/nn/optim but does not import them directly;
# they presumably arrive via this star import — verify model/model_lora.py exports them.
from model.model_lora import *

warnings.filterwarnings('ignore')


# Print only on the main process (rank 0) under DDP.
def Logger(content):
    if not ddp or dist.get_rank() == 0:
        print(content)


# Cosine-decay learning-rate schedule with a floor of lr/10.
def get_lr(current_step, total_steps, lr):
    return lr / 10 + 0.5 * lr * (1 + math.cos(math.pi * current_step / total_steps))


# Nearly identical to the full-SFT train_epoch; differences are marked below.
def train_epoch(epoch, wandb):
    loss_fct = nn.CrossEntropyLoss(reduction='none')  # per-token loss; masked/averaged manually
    start_time = time.time()
    for step, (X, Y, loss_mask) in enumerate(train_loader):
        X = X.to(args.device)
        Y = Y.to(args.device)
        loss_mask = loss_mask.to(args.device)
        lr = get_lr(epoch * iter_per_epoch + step, args.epochs * iter_per_epoch, args.learning_rate)
        for param_group in optimizer.param_groups:
            param_group['lr'] = lr

        with ctx:
            res = model(X)
            loss = loss_fct(
                res.logits.view(-1, res.logits.size(-1)),
                Y.view(-1)
            ).view(Y.size())
            loss = (loss * loss_mask).sum() / loss_mask.sum()  # average over unmasked tokens
            loss += res.aux_loss  # MoE auxiliary loss, if any
            loss = loss / args.accumulation_steps

        scaler.scale(loss).backward()

        if (step + 1) % args.accumulation_steps == 0:
            scaler.unscale_(optimizer)
            # Difference from full SFT: clip only the trainable LoRA parameters.
            torch.nn.utils.clip_grad_norm_(lora_params, args.grad_clip)

            scaler.step(optimizer)
            scaler.update()

            optimizer.zero_grad(set_to_none=True)

        if step % args.log_interval == 0:
            spend_time = time.time() - start_time
            Logger(
                'Epoch:[{}/{}]({}/{}) loss:{:.3f} lr:{:.12f} epoch_Time:{}min:'.format(
                    epoch + 1,
                    args.epochs,
                    step,
                    iter_per_epoch,
                    loss.item(),
                    optimizer.param_groups[-1]['lr'],
                    spend_time / (step + 1) * iter_per_epoch // 60 - spend_time // 60))

            if (wandb is not None) and (not ddp or dist.get_rank() == 0):
                wandb.log({"loss": loss,
                           "lr": optimizer.param_groups[-1]['lr'],
                           "epoch_Time": spend_time / (step + 1) * iter_per_epoch // 60 - spend_time // 60})

        if (step + 1) % args.save_interval == 0 and (not ddp or dist.get_rank() == 0):
            model.eval()
            # Difference from full SFT: save only the LoRA weights.
            # NOTE(review): assumes {save_dir}/lora/ already exists — confirm it is created elsewhere.
            save_lora(model, f'{args.save_dir}/lora/{args.lora_name}_{lm_config.dim}.pth')
            model.train()


# Build the base model from the RLHF checkpoint (LoRA is applied by the caller).
def init_model(lm_config):
    tokenizer = AutoTokenizer.from_pretrained('./model/minimind_tokenizer')
    model = MiniMindLM(lm_config)
    moe_path = '_moe' if lm_config.use_moe else ''
    # LoRA fine-tuning starts from the RLHF checkpoint (unlike full SFT, which uses pretrain).
    ckp = f'./out/rlhf_{lm_config.dim}{moe_path}.pth'
    state_dict = torch.load(ckp, map_location=args.device)
    model.load_state_dict(state_dict, strict=False)
    return model.to(args.device), tokenizer


# Initialize torch.distributed (NCCL) from torchrun env vars; updates globals.
def init_distributed_mode():
    if not ddp: return
    global ddp_local_rank, DEVICE

    dist.init_process_group(backend="nccl")
    ddp_rank = int(os.environ["RANK"])
    ddp_local_rank = int(os.environ["LOCAL_RANK"])
    ddp_world_size = int(os.environ["WORLD_SIZE"])
    DEVICE = f"cuda:{ddp_local_rank}"
    torch.cuda.set_device(DEVICE)


if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="MiniMind SFT with LoRA")
    parser.add_argument("--out_dir", type=str, default="out")
    parser.add_argument("--epochs", type=int, default=50)
    parser.add_argument("--batch_size", type=int, default=16)
    parser.add_argument("--learning_rate", type=float, default=5e-5)
    parser.add_argument("--device", type=str, default="cuda:0" if torch.cuda.is_available() else "cpu")
    parser.add_argument("--dtype", type=str, default="bfloat16")
    parser.add_argument("--use_wandb", action="store_true")
    parser.add_argument("--wandb_project", type=str, default="MiniMind-LoRA-SFT")
    parser.add_argument("--num_workers", type=int, default=1)
    parser.add_argument("--ddp", action="store_true")
    parser.add_argument("--accumulation_steps", type=int, default=1)
    parser.add_argument("--grad_clip", type=float, default=1.0)
    parser.add_argument("--warmup_iters", type=int, default=0)
    parser.add_argument("--log_interval", type=int, default=100)
    parser.add_argument("--save_interval", type=int, default=1)
    parser.add_argument('--local_rank', type=int, default=-1)
    parser.add_argument('--dim', default=512, type=int)
    parser.add_argument('--n_layers', default=8, type=int)
    parser.add_argument('--max_seq_len', default=512, type=int)
    # NOTE(review): type=bool is an argparse pitfall — bool("False") is True.
    parser.add_argument('--use_moe', default=False, type=bool)
    parser.add_argument("--data_path", type=str, default="./dataset/lora_identity.jsonl")
    parser.add_argument("--lora_name", type=str, default="lora_identity", help="根据任务保存成lora_(英文/医学/心理...)")
    args = parser.parse_args()

    lm_config = LMConfig(dim=args.dim, n_layers=args.n_layers, max_seq_len=args.max_seq_len, use_moe=args.use_moe)
    args.save_dir = os.path.join(args.out_dir)  # NOTE(review): save_dir == out_dir; duplicate makedirs below
    os.makedirs(args.save_dir, exist_ok=True)
    os.makedirs(args.out_dir, exist_ok=True)
    tokens_per_iter = args.batch_size * lm_config.max_seq_len
    device_type = "cuda" if "cuda" in args.device else "cpu"

    ctx = nullcontext() if device_type == "cpu" else torch.cuda.amp.autocast()
    ddp = int(os.environ.get("RANK", -1)) != -1  # is this a ddp run?
    ddp_local_rank, DEVICE = 0, "cuda:0"  # defaults; overwritten under DDP
    base_seed = 1337
    torch.manual_seed(base_seed)
    torch.cuda.manual_seed(base_seed)

    if ddp:
        init_distributed_mode()
        args.device = torch.device(DEVICE)
        rank = dist.get_rank()
        torch.manual_seed(base_seed + rank)
        # Also seed CUDA per rank.
        torch.cuda.manual_seed(base_seed + rank)

    args.wandb_run_name = f"MiniMind-Lora-SFT-Epoch-{args.epochs}-BatchSize-{args.batch_size}-LearningRate-{args.learning_rate}"
    if args.use_wandb and (not ddp or ddp_local_rank == 0):
        import wandb

        wandb.init(project=args.wandb_project, name=args.wandb_run_name)
    else:
        wandb = None

    model, tokenizer = init_model(lm_config)
    apply_lora(model)  # inject LoRA adapters into the base model (from model_lora)

    total_params = sum(p.numel() for p in model.parameters())  # total parameter count
    lora_params_count = sum(p.numel() for name, p in model.named_parameters() if 'lora' in name)  # LoRA parameter count
    if not ddp or dist.get_rank() == 0:
        print(f"LLM 总参数量: {total_params}")
        print(f"LoRA 参数量: {lora_params_count}")
        print(f"LoRA 参数占比: {lora_params_count / total_params * 100:.2f}%")

    # Freeze everything except the LoRA adapter weights.
    for name, param in model.named_parameters():
        if 'lora' not in name:
            param.requires_grad = False
    # Collect the LoRA parameters for the optimizer and gradient clipping.
    lora_params = []
    for name, param in model.named_parameters():
        if 'lora' in name:
            lora_params.append(param)

    # Optimize only the LoRA parameters.
    optimizer = optim.AdamW(lora_params, lr=args.learning_rate)
    train_ds = SFTDataset(args.data_path, tokenizer, max_length=lm_config.max_seq_len)
    train_sampler = DistributedSampler(train_ds) if ddp else None
    train_loader = DataLoader(
        train_ds,
        batch_size=args.batch_size,
        pin_memory=True,
        drop_last=False,
        shuffle=False,
        num_workers=args.num_workers,
        sampler=train_sampler
    )

    scaler = torch.cuda.amp.GradScaler(enabled=(args.dtype in ['float16', 'bfloat16']))
    iter_per_epoch = len(train_loader)

    for epoch in range(args.epochs):
        train_epoch(epoch, wandb)

# ===== new file in this diff: train_pretrain.py =====
import os
# Must be set before wandb initializes: forces offline logging (or use "dryrun").
os.environ["WANDB_MODE"] = "offline"
import platform
import argparse
import time
import math
import warnings
import pandas as pd
import torch
import torch.distributed as dist
from torch import optim, nn
from torch.nn.parallel import DistributedDataParallel
from torch.optim.lr_scheduler import CosineAnnealingLR
from torch.utils.data import DataLoader, DistributedSampler
# (communication-profiling tool imports removed in this revision)
from contextlib import nullcontext
from typing import Optional

from transformers import AutoTokenizer

from model.model import MiniMindLM
from model.LMConfig import LMConfig
from model.dataset import PretrainDataset

warnings.filterwarnings('ignore')


def Logger(content):
    """Print ``content`` only when not under DDP, or on the DDP main rank."""
    if not ddp or dist.get_rank() == 0:
        print(content)


def get_lr(current_step, total_steps, lr):
    """Cosine-decay schedule with a floor of lr/10.

    get_lr(c, t, l) = l/10 + 0.5*l*(1 + cos(pi*c/t))
    """
    return lr / 10 + 0.5 * lr * (1 + math.cos(math.pi * current_step / total_steps))
def train_epoch(epoch, wandb):
    """Run one pretraining epoch with manual batch prefetch, optional CUDA-event
    profiling, gradient accumulation, and a NaN-rescue exception path.

    NOTE(review): block nesting below is reconstructed from a flattened diff —
    the exact placement of the profiling records should be confirmed against
    the original file.
    """
    loss_fct = nn.CrossEntropyLoss(reduction='none')
    start_time = time.time()
    # Define moe_path up front so the exception handler never hits an unbound name.
    moe_path = '_moe' if lm_config.use_moe else ''

    # CUDA events for fine-grained timing (main process only).
    if args.profile and (not ddp or dist.get_rank() == 0):
        data_start = torch.cuda.Event(enable_timing=True)
        data_end = torch.cuda.Event(enable_timing=True)
        forward_start = torch.cuda.Event(enable_timing=True)
        forward_end = torch.cuda.Event(enable_timing=True)
        backward_start = torch.cuda.Event(enable_timing=True)
        backward_end = torch.cuda.Event(enable_timing=True)
        optimizer_start = torch.cuda.Event(enable_timing=True)
        optimizer_end = torch.cuda.Event(enable_timing=True)

    # (CUDA-graph optimization removed in this revision.)

    # Manual prefetch: keep up to prefetch_factor batches staged on the device.
    prefetch_factor = 2  # number of batches to prefetch
    data_iter = iter(train_loader)
    prefetch_batches = []

    # Stage the initial batches (non_blocking copies overlap with compute).
    for _ in range(min(prefetch_factor, len(train_loader))):
        try:
            batch = next(data_iter)
            prefetch_batches.append([t.to(args.device, non_blocking=True) for t in batch])
        except StopIteration:
            break

    for step in range(len(train_loader)):
        try:
            # Time the data-loading phase.
            if args.profile and (not ddp or dist.get_rank() == 0):
                data_start.record()

            # Consume a prefetched batch, or load synchronously if the queue is empty.
            if prefetch_batches:
                X, Y, loss_mask = prefetch_batches.pop(0)
            else:
                X, Y, loss_mask = [t.to(args.device) for t in next(data_iter)]

            # Asynchronously stage the next batch.
            if step + prefetch_factor < len(train_loader):
                try:
                    batch = next(data_iter)
                    prefetch_batches.append([t.to(args.device, non_blocking=True) for t in batch])
                except StopIteration:
                    pass

            if args.profile and (not ddp or dist.get_rank() == 0):
                data_end.record()

            # Cosine LR schedule applied manually per step.
            lr = get_lr(epoch * iter_per_epoch + step, args.epochs * iter_per_epoch, args.learning_rate)
            for param_group in optimizer.param_groups:
                param_group['lr'] = lr

            # Time the forward pass.
            if args.profile and (not ddp or dist.get_rank() == 0):
                forward_start.record()

            # Standard forward pass under autocast.
            with ctx:
                res = model(X)
                loss = loss_fct(
                    res.logits.view(-1, res.logits.size(-1)),
                    Y.view(-1)
                ).view(Y.size())
                loss = (loss * loss_mask).sum() / loss_mask.sum()
                # Add the MoE auxiliary (load-balancing) loss if the layers expose one.
                try:
                    if hasattr(model, 'module'):
                        # DDP: layers live on model.module
                        aux_loss = sum(l.feed_forward.aux_loss for l in model.module.layers
                                       if hasattr(l.feed_forward, 'aux_loss'))
                    else:
                        # non-DDP
                        aux_loss = sum(l.feed_forward.aux_loss for l in model.layers
                                       if hasattr(l.feed_forward, 'aux_loss'))
                    loss += aux_loss
                except Exception as e:
                    # Best-effort: skip the auxiliary loss if anything goes wrong.
                    Logger(f"Warning: Could not add auxiliary loss: {e}")
                loss = loss / args.accumulation_steps

            # Backward with loss scaling (mixed precision).
            scaler.scale(loss).backward()

            if args.profile and (not ddp or dist.get_rank() == 0):
                forward_end.record()
                backward_start.record()

            # One-time dtype sanity check on the first step of the main process.
            if step == 0 and (not ddp or dist.get_rank() == 0):
                Logger("---- Data Type Check ----")
                Logger(f"X.dtype: {X.dtype}")
                if hasattr(model, 'module'):  # DDP case
                    Logger(f"Model parameter dtype: {next(model.module.parameters()).dtype}")
                else:  # non-DDP case
                    Logger(f"Model parameter dtype: {next(model.parameters()).dtype}")
                Logger(f"res.logits.dtype: {res.logits.dtype}")
                Logger(f"loss.dtype: {loss.dtype}")
                Logger("-------------------------")

            if args.profile and (not ddp or dist.get_rank() == 0):
                backward_end.record()

                # Record optimizer timing on profile-interval steps (when this
                # step is also a gradient-accumulation boundary).
                if (step + 1) % args.profile_interval == 0:
                    if (step + 1) % args.accumulation_steps == 0:
                        optimizer_start.record()

            # Optimizer step at each gradient-accumulation boundary.
            if (step + 1) % args.accumulation_steps == 0:
                if args.profile and (not ddp or dist.get_rank() == 0):
                    # If not already recorded above, start the optimizer timer now.
                    if (step + 1) % args.profile_interval != 0:
                        optimizer_start.record()

                scaler.unscale_(optimizer)  # unscale before clipping
                torch.nn.utils.clip_grad_norm_(model.parameters(), args.grad_clip)

                scaler.step(optimizer)
                scaler.update()

                optimizer.zero_grad(set_to_none=True)

                if args.profile and (not ddp or dist.get_rank() == 0):
                    optimizer_end.record()

            # Profiling report every profile_interval steps.
            if args.profile and (not ddp or dist.get_rank() == 0) and (step + 1) % args.profile_interval == 0:
                # Synchronize so elapsed_time readings are accurate.
                torch.cuda.synchronize()

                # Per-phase durations in milliseconds.
                data_time = data_start.elapsed_time(data_end)
                forward_time = forward_start.elapsed_time(forward_end)
                backward_time = backward_start.elapsed_time(backward_end)

                # Optimizer time exists only on accumulation-boundary steps.
                if (step + 1) % args.accumulation_steps == 0:
                    optimizer_time = optimizer_start.elapsed_time(optimizer_end)
                    total_compute_time = forward_time + backward_time + optimizer_time
                    Logger(f"性能分析 - 步骤 {step+1}:")
                    Logger(f"  数据加载时间: {data_time:.2f} ms")
                    Logger(f"  前向传播时间: {forward_time:.2f} ms")
                    Logger(f"  反向传播时间: {backward_time:.2f} ms")
                    Logger(f"  优化器时间: {optimizer_time:.2f} ms")
                    Logger(f"  总计算时间: {total_compute_time:.2f} ms")
                    Logger(f"  计算/数据比例: {total_compute_time / data_time:.2f}")
                else:
                    # Mid-accumulation step: no optimizer timing.
                    total_compute_time = forward_time + backward_time
                    Logger(f"性能分析 - 步骤 {step+1} (梯度累积中):")
                    Logger(f"  数据加载时间: {data_time:.2f} ms")
                    Logger(f"  前向传播时间: {forward_time:.2f} ms")
                    Logger(f"  反向传播时间: {backward_time:.2f} ms")
                    Logger(f"  总计算时间: {total_compute_time:.2f} ms")
                    Logger(f"  计算/数据比例: {total_compute_time / data_time:.2f}")

            # Periodic console/wandb logging.
            if step % args.log_interval == 0:
                spend_time = time.time() - start_time
                Logger(
                    'Epoch:[{}/{}]({}/{}) loss:{:.3f} lr:{:.12f} epoch_Time:{}min:'.format(
                        epoch + 1,
                        args.epochs,
                        step,
                        iter_per_epoch,
                        loss.item() * args.accumulation_steps,  # undo accumulation scaling for display
                        optimizer.param_groups[-1]['lr'],
                        spend_time / (step + 1) * iter_per_epoch // 60 - spend_time // 60))

                if (wandb is not None) and (not ddp or dist.get_rank() == 0):
                    log_dict = {
                        "loss": loss.item() * args.accumulation_steps,
                        "lr": optimizer.param_groups[-1]['lr'],
                        "epoch_Time": spend_time / (step + 1) * iter_per_epoch // 60 - spend_time // 60
                    }

                    # Attach profiling metrics when available this step.
                    if args.profile and (step + 1) % args.profile_interval == 0:
                        # Basic phase timings.
                        perf_dict = {
                            "data_time_ms": data_time,
                            "forward_time_ms": forward_time,
                            "backward_time_ms": backward_time
                        }

                        # Optimizer time exists only on accumulation boundaries.
                        if (step + 1) % args.accumulation_steps == 0:
                            total_compute_time = forward_time + backward_time + optimizer_time
                            perf_dict.update({
                                "optimizer_time_ms": optimizer_time,
                                "compute_time_ms": total_compute_time
                            })
                        else:
                            total_compute_time = forward_time + backward_time
                            perf_dict.update({
                                "compute_time_ms": total_compute_time
                            })

                        log_dict.update(perf_dict)

                    wandb.log(log_dict)

            # (communication-profiling code removed in this revision)

            # Periodic checkpoint (rank 0 only); overwrites the same file each time.
            if (step + 1) % args.save_interval == 0 and (not ddp or dist.get_rank() == 0):
                model.eval()
                ckp = f'{args.save_dir}/pretrain_{lm_config.dim}{moe_path}.pth'

                if isinstance(model, torch.nn.parallel.DistributedDataParallel):
                    state_dict = model.module.state_dict()  # unwrap DDP before saving
                else:
                    state_dict = model.state_dict()

                torch.save(state_dict, ckp)  # parameters only, no optimizer state
                model.train()

        except Exception as e:
            # Rescue path: dump the current weights for post-mortem, report which
            # parameters carry NaN gradients, then abort the run.
            print(f"Error occurred: {str(e)}")
            save_path = f'{args.save_dir}/pretrain_{lm_config.dim}{moe_path}_nanERROR.pth'
            if os.path.exists(save_path):
                os.remove(save_path)

            if isinstance(model, torch.nn.parallel.DistributedDataParallel):
                state_dict = model.module.state_dict()
            else:
                state_dict = model.state_dict()
            torch.save(state_dict, save_path)

            for name, param in model.named_parameters():
                if param.grad is not None and torch.isnan(param.grad).any():
                    print(f"NaN gradient in parameter: {name}")

            for name, param in model.named_parameters():
                if param.grad is not None and torch.isnan(param.grad).any():
                    print(f"Parameter {name} values: {param.data}")
                    print(f"Parameter {name} gradients: {param.grad}")

            # NOTE(review): any exception (not just NaN) reaches this raise with a
            # possibly misleading message.
            raise ValueError("NaN gradient detected")
AutoTokenizer.from_pretrained('./model/minimind_tokenizer') + # 加载模型 + model = MiniMindLM(lm_config).to(args.device) + + # Load pretrained token embeddings if path is provided + if pretrained_embedding_path and os.path.exists(pretrained_embedding_path): + Logger(f"Loading pretrained token embeddings from {pretrained_embedding_path}") + embedding_weights = torch.load(pretrained_embedding_path, map_location=args.device) + model.tok_embeddings.load_state_dict(embedding_weights) + Logger("Successfully loaded pretrained token embeddings.") + elif pretrained_embedding_path: + Logger(f"Warning: Pretrained embedding path {pretrained_embedding_path} provided but file does not exist. Initializing embeddings from scratch.") + + # 打印模型参数 + Logger(f'LLM总参数量:{sum(p.numel() for p in model.parameters() if p.requires_grad) / 1e6:.3f} 百万') + return model, tokenizer + + +# 移除通信分析函数 + + +def init_distributed_mode(): + if not ddp: return #如果没有启用分布式数据并行(DDP),直接返回,不执行任何操作。 + global ddp_local_rank, DEVICE #声明这两个变量为全局变量,以便在函数外部也能访问它们。 + + dist.init_process_group(backend="nccl") #初始化分布式进程组,使用NCCL后端(NVIDIA Collective Communications Library),这是NVIDIA GPU之间通信的优化库。 + ddp_rank = int(os.environ["RANK"]) #从环境变量获取当前进程的全局编号。 + ddp_local_rank = int(os.environ["LOCAL_RANK"]) #从环境变量获取当前进程的本地编号。 + ddp_world_size = int(os.environ["WORLD_SIZE"]) #从环境变量获取当前进程组中的进程总数。 + DEVICE = f"cuda:{ddp_local_rank}" #根据本地编号选择GPU设备。 + torch.cuda.set_device(DEVICE) #设置当前进程的GPU设备。 + + +# torchrun --nproc_per_node 2 1-pretrain.py +if __name__ == "__main__": + parser = argparse.ArgumentParser(description="MiniMind Pretraining") + parser.add_argument("--out_dir", type=str, default="out") + # 若要以最快速度实现zero则epochs设置为1轮;否则应当利用有限的数据训练2~6个epochs。 + parser.add_argument("--epochs", type=int, default=3) + parser.add_argument("--batch_size", type=int, default=24) + parser.add_argument("--learning_rate", type=float, default=2e-4) + parser.add_argument("--device", type=str, default="cuda:0" if torch.cuda.is_available() else "cpu") 
# torchrun --nproc_per_node 2 1-pretrain.py
if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="MiniMind Pretraining")
    parser.add_argument("--out_dir", type=str, default="out")
    # epochs=1 gets through the data fastest; 2-6 epochs squeeze more out of
    # a limited dataset.
    parser.add_argument("--epochs", type=int, default=3)
    parser.add_argument("--batch_size", type=int, default=24)
    parser.add_argument("--learning_rate", type=float, default=2e-4)
    parser.add_argument("--device", type=str, default="cuda:0" if torch.cuda.is_available() else "cpu")
    parser.add_argument("--dtype", type=str, default="bfloat16")
    # NOTE(review): default=True together with store_true means this flag can
    # never be switched off from the command line — confirm this is intended.
    parser.add_argument("--use_wandb", default=True, action="store_true")
    parser.add_argument("--wandb_project", type=str, default="MiniMind-Pretrain")
    parser.add_argument("--num_workers", type=int, default=48)
    parser.add_argument("--ddp", action="store_true")
    parser.add_argument("--accumulation_steps", type=int, default=32)  # gradient-accumulation steps
    parser.add_argument("--grad_clip", type=float, default=1.0)  # gradient-clipping threshold
    parser.add_argument("--warmup_iters", type=int, default=0)  # LR warmup iterations
    parser.add_argument("--log_interval", type=int, default=100)  # steps between log lines
    parser.add_argument("--save_interval", type=int, default=10000)  # steps between checkpoints
    parser.add_argument('--local_rank', type=int, default=-1)  # set by the DDP launcher
    parser.add_argument('--dim', default=1024, type=int)  # model hidden size
    parser.add_argument('--n_layers', default=32, type=int)  # transformer depth
    parser.add_argument('--max_seq_len', default=1024, type=int)  # max input length
    # FIX: type=bool treats ANY non-empty string (including "False") as True;
    # parse the text explicitly so "--use_moe False" behaves as expected while
    # "--use_moe True" keeps working.
    parser.add_argument('--use_moe', default=False,
                        type=lambda v: str(v).lower() in ("1", "true", "yes"))
    parser.add_argument('--disable_db', action='store_true', help="禁用数据库功能,使用固定值1e-4替代")
    parser.add_argument("--data_path", type=str, default="./dataset/pretrain_hq.jsonl")
    parser.add_argument("--pretrained_embedding_path", type=str, default=None, help="Path to pretrained token embedding weights (.pth file)")
    # Profiling options (NOTE(review): same always-on store_true pattern).
    parser.add_argument("--profile", action="store_true", default=True, help="启用性能分析")
    parser.add_argument("--profile_interval", type=int, default=10, help="性能分析打印间隔(步数)")
    parser.add_argument("--use_flash_attn", action="store_true", default=True, help="启用FlashAttention")
    args = parser.parse_args()
    print(args)

    lm_config = LMConfig(
        dim=args.dim,
        n_layers=args.n_layers,
        max_seq_len=args.max_seq_len,
        use_moe=args.use_moe,
        disable_db=args.disable_db,      # special mode: fixed 1e-4 instead of the DB
        flash_attn=args.use_flash_attn,  # FlashAttention support
    )
    # save_dir and out_dir are currently the same path.
    args.save_dir = os.path.join(args.out_dir)
    os.makedirs(args.save_dir, exist_ok=True)
    os.makedirs(args.out_dir, exist_ok=True)
    tokens_per_iter = args.batch_size * lm_config.max_seq_len  # tokens per optimizer micro-batch
    print(f"tokens_per_iter: {tokens_per_iter}")
    device_type = "cuda" if "cuda" in args.device else "cpu"

    # Map the CLI dtype string to a torch dtype.
    pt_dtype = {'float32': torch.float32, 'bfloat16': torch.bfloat16, 'float16': torch.float16}[args.dtype]

    args.wandb_run_name = f"MiniMind-Pretrain-Epoch-{args.epochs}-BatchSize-{args.batch_size}-LearningRate-{args.learning_rate}"

    # Autocast only makes sense on CUDA; on CPU run un-casted.
    ctx = nullcontext() if device_type == "cpu" else torch.cuda.amp.autocast(dtype=pt_dtype)

    ddp = int(os.environ.get("RANK", -1)) != -1  # torchrun sets RANK for DDP runs
    ddp_local_rank, DEVICE = 0, "cuda:0"

    base_seed = 1337
    torch.manual_seed(base_seed)
    torch.cuda.manual_seed(base_seed)

    if ddp:
        init_distributed_mode()
        args.device = torch.device(DEVICE)
        rank = dist.get_rank()
        # Per-rank seeds so stochastic ops differ across workers.
        torch.manual_seed(base_seed + rank)
        torch.cuda.manual_seed(base_seed + rank)

    if args.use_wandb and (not ddp or ddp_local_rank == 0):
        import wandb

        # Merge args and lm_config parameters for the wandb config.
        config = vars(args).copy()
        config.update(lm_config.__dict__)
        wandb.init(project=args.wandb_project, name=args.wandb_run_name, config=config)
    else:
        wandb = None

    model, tokenizer = init_model(lm_config, args.pretrained_embedding_path)
    train_ds = PretrainDataset(args.data_path, tokenizer, max_length=lm_config.max_seq_len)
    train_sampler = DistributedSampler(train_ds) if ddp else None
    train_loader = DataLoader(
        train_ds,
        batch_size=args.batch_size,
        pin_memory=True,
        pin_memory_device=f"cuda:{ddp_local_rank}" if ddp else "cuda:0",  # pin to this rank's GPU
        drop_last=False,
        shuffle=False,  # ordering handled by the sampler in DDP mode
        num_workers=args.num_workers,
        sampler=train_sampler,
        persistent_workers=True if args.num_workers > 0 else False,  # keep workers alive
        prefetch_factor=2 if args.num_workers > 0 else None,
    )

    # GradScaler is only needed for float16; bfloat16 has enough range.
    scaler = torch.cuda.amp.GradScaler(enabled=(args.dtype == 'float16'))
    optimizer = optim.AdamW(model.parameters(), lr=args.learning_rate)

    if ddp:
        # pos_cis is a complex-valued buffer that must not be broadcast by DDP.
        model._ddp_params_and_buffers_to_ignore = {"pos_cis"}
        # find_unused_parameters=True because the model has genuinely unused params.
        model = DistributedDataParallel(model, device_ids=[ddp_local_rank], find_unused_parameters=True)

    # NOTE(review): anomaly detection slows training considerably; disable it
    # once the run is stable.
    torch.autograd.set_detect_anomaly(True)
    iter_per_epoch = len(train_loader)
    for epoch in range(args.epochs):
        train_epoch(epoch, wandb)


# ---- train_pretrain_accelerate.py (new file in this commit) ----
import os
# Run wandb offline; "dryrun" also works.
os.environ["WANDB_MODE"] = "offline"
import platform
import argparse
import time
import math
import warnings
import pandas as pd
import torch
from torch import optim, nn
from torch.utils.data import DataLoader
from contextlib import nullcontext
from typing import Optional
import datetime  # for time formatting in progress logs
from accelerate import Accelerator
from accelerate.utils import set_seed
from accelerate.utils import DeepSpeedPlugin
from accelerate.utils import DistributedDataParallelKwargs
from transformers import AutoTokenizer, get_cosine_schedule_with_warmup

from model.model import MiniMindLM
from model.LMConfig import LMConfig
from model.dataset import PretrainDataset

warnings.filterwarnings('ignore')


def Logger(msg, accelerator=None):
    """Print a timestamped message; main-process-only when accelerator is given."""
    if accelerator is None or accelerator.is_main_process:
        print(f"[{time.strftime('%Y-%m-%d %H:%M:%S')}] {msg}")
def format_time(seconds):
    """Format a duration in seconds as H:MM:SS for progress logs."""
    return str(datetime.timedelta(seconds=int(seconds)))


def get_lr(it, num_iters, learning_rate):
    """Cosine learning-rate decay from learning_rate at it=0 down to 0 at it=num_iters."""
    return learning_rate * 0.5 * (1.0 + math.cos(math.pi * it / num_iters))


def init_model(lm_config, pretrained_embedding_path=None):
    """Create the MiniMind model and tokenizer; optionally warm-start embeddings.

    When ``pretrained_embedding_path`` is given, the tensor is copied into both
    the embedding and output layers (weight tying).
    """
    tokenizer = AutoTokenizer.from_pretrained('./model/minimind_tokenizer')
    model = MiniMindLM(lm_config)

    if pretrained_embedding_path:
        Logger(f"Loading pretrained token embeddings from {pretrained_embedding_path}")
        pretrained_embeddings = torch.load(pretrained_embedding_path)
        model.tok_embeddings.weight.data.copy_(pretrained_embeddings)
        model.output.weight.data.copy_(pretrained_embeddings)  # tied output head

    Logger(f'LLM总参数量:{sum(p.numel() for p in model.parameters() if p.requires_grad) / 1e6:.3f} 百万')
    return model, tokenizer


def train_epoch(epoch, accelerator, model, train_loader, optimizer, scheduler, args, ctx, overall_start_time):
    """Run one training epoch under Accelerate/DeepSpeed.

    Handles manual batch prefetching, optional CUDA-event profiling on the
    main process, periodic logging and periodic checkpointing. DeepSpeed
    manages gradient accumulation and clipping internally.
    """
    loss_fct = nn.CrossEntropyLoss(reduction='none')
    epoch_start_time = time.time()
    total_steps_in_epoch = len(train_loader)
    total_training_steps = args.epochs * total_steps_in_epoch
    moe_path = '_moe' if args.use_moe else ''

    # CUDA events for per-phase timing (main process only).
    if args.profile and accelerator.is_main_process:
        data_start = torch.cuda.Event(enable_timing=True)
        data_end = torch.cuda.Event(enable_timing=True)
        forward_start = torch.cuda.Event(enable_timing=True)
        forward_end = torch.cuda.Event(enable_timing=True)
        backward_start = torch.cuda.Event(enable_timing=True)
        backward_end = torch.cuda.Event(enable_timing=True)
        optimizer_start = torch.cuda.Event(enable_timing=True)
        optimizer_end = torch.cuda.Event(enable_timing=True)

    # Simple manual prefetch queue on top of the DataLoader iterator.
    prefetch_factor = 2  # number of batches kept ahead
    data_iter = iter(train_loader)
    prefetch_batches = []
    for _ in range(min(prefetch_factor, len(train_loader))):
        try:
            prefetch_batches.append(next(data_iter))
        except StopIteration:
            break

    last_log_time = epoch_start_time

    for step in range(total_steps_in_epoch):
        try:
            # --- data loading (timed) ---
            if args.profile and accelerator.is_main_process:
                data_start.record()
            if prefetch_batches:
                X, Y, loss_mask = prefetch_batches.pop(0)
            else:
                # Queue drained — pull directly from the iterator.
                X, Y, loss_mask = next(data_iter)
            # Top up the prefetch queue for a later step.
            if step + prefetch_factor < len(train_loader):
                try:
                    prefetch_batches.append(next(data_iter))
                except StopIteration:
                    pass
            if args.profile and accelerator.is_main_process:
                data_end.record()

            # --- forward (timed) ---
            if args.profile and accelerator.is_main_process:
                forward_start.record()
            with ctx:
                res = model(X)
                loss = loss_fct(
                    res.logits.view(-1, res.logits.size(-1)),
                    Y.view(-1)
                ).view(Y.size())
                # Masked mean over valid tokens only.
                loss = (loss * loss_mask).sum() / loss_mask.sum()
                # Add MoE auxiliary losses when the layers expose them.
                try:
                    aux_loss = sum(l.feed_forward.aux_loss for l in model.module.layers
                                   if hasattr(l.feed_forward, 'aux_loss'))
                    loss += aux_loss
                except Exception as e:
                    Logger(f"Warning: Could not add auxiliary loss: {e}")
                loss = loss / args.accumulation_steps
            if args.profile and accelerator.is_main_process:
                forward_end.record()

            # --- backward (timed) ---
            if args.profile and accelerator.is_main_process:
                backward_start.record()
            # DeepSpeed handles gradient accumulation and clipping here.
            accelerator.backward(loss)
            if args.profile and accelerator.is_main_process:
                backward_end.record()

            # --- optimizer (timed) ---
            if args.profile and accelerator.is_main_process:
                optimizer_start.record()
            # DeepSpeed internally skips the actual update until the
            # accumulation boundary, so step() is safe to call every iteration.
            optimizer.step()
            # zero_grad is implied after step() under DeepSpeed, but calling it
            # explicitly is harmless and clearer.
            optimizer.zero_grad()
            # FIX: per PyTorch's documented contract the LR scheduler must be
            # stepped AFTER optimizer.step(); the original stepped it before
            # the forward pass, skipping the first LR value and triggering the
            # "scheduler before optimizer" warning.
            if scheduler is not None:
                scheduler.step()
            if args.profile and accelerator.is_main_process:
                optimizer_end.record()

            # --- periodic logging (main process only) ---
            if (step + 1) % args.log_interval == 0 and accelerator.is_main_process:
                current_time = time.time()
                if args.profile:
                    torch.cuda.synchronize()
                    # NOTE(review): elapsed_time reflects only the LAST recorded
                    # iteration, yet it is divided by log_interval below — the
                    # printed "Avg/iter" underestimates by ~log_interval unless
                    # times are accumulated across steps. Kept as-is; confirm
                    # the intended semantics before relying on these numbers.
                    data_time = data_start.elapsed_time(data_end)
                    forward_time = forward_start.elapsed_time(forward_end)
                    backward_time = backward_start.elapsed_time(backward_end)
                    optimizer_time = optimizer_start.elapsed_time(optimizer_end)
                    iter_time = (current_time - last_log_time) * 1000 / args.log_interval  # avg ms/iter since last log

                    if (step + 1) % (args.log_interval * args.profile_interval) == 0:
                        Logger(f"性能分析 (Avg/iter over last {args.log_interval} steps) - "
                               f"Data: {data_time/args.log_interval:.2f}ms, "
                               f"Fwd: {forward_time/args.log_interval:.2f}ms, "
                               f"Bwd: {backward_time/args.log_interval:.2f}ms, "
                               f"Optim: {optimizer_time/args.log_interval:.2f}ms, "
                               f"Iter Time: {iter_time:.2f}ms", accelerator)
                        # Recreate the events so the next window starts fresh.
                        data_start = torch.cuda.Event(enable_timing=True)
                        data_end = torch.cuda.Event(enable_timing=True)
                        forward_start = torch.cuda.Event(enable_timing=True)
                        forward_end = torch.cuda.Event(enable_timing=True)
                        backward_start = torch.cuda.Event(enable_timing=True)
                        backward_end = torch.cuda.Event(enable_timing=True)
                        optimizer_start = torch.cuda.Event(enable_timing=True)
                        optimizer_end = torch.cuda.Event(enable_timing=True)

                current_lr = optimizer.param_groups[0]['lr']

                # ETA bookkeeping for this epoch and for the whole run.
                epoch_elapsed_time = current_time - epoch_start_time
                epoch_steps_done = step + 1
                epoch_avg_step_time = epoch_elapsed_time / epoch_steps_done
                epoch_remaining_time = epoch_avg_step_time * (total_steps_in_epoch - epoch_steps_done)

                total_elapsed_time = current_time - overall_start_time
                total_steps_done = epoch * total_steps_in_epoch + epoch_steps_done
                total_avg_step_time = total_elapsed_time / total_steps_done if total_steps_done > 0 else 0
                total_remaining_time = total_avg_step_time * (total_training_steps - total_steps_done) if total_steps_done > 0 else 0

                # Throughput over the last log window.
                interval_elapsed_time = current_time - last_log_time
                tokens_processed_interval = args.log_interval * args.batch_size * args.max_seq_len
                tokens_per_sec = tokens_processed_interval / interval_elapsed_time if interval_elapsed_time > 0 else 0
                last_log_time = current_time

                Logger(f"Epoch {epoch+1}/{args.epochs}, Step {step+1}/{total_steps_in_epoch}, "
                       f"Loss: {loss.item()*args.accumulation_steps:.4f}, "
                       f"LR: {current_lr:.6f}, "
                       f"Speed: {tokens_per_sec:.2f} tokens/sec | "
                       f"Epoch Time Left: {format_time(epoch_remaining_time)} | "
                       f"Total Time Left: {format_time(total_remaining_time)}", accelerator)

            # --- periodic checkpoint (main process only) ---
            if (step + 1) % args.save_interval == 0 and accelerator.is_main_process:
                ckp = f'{args.save_dir}/pretrain_{args.dim}{moe_path}.pth'
                # Save the unwrapped (plain nn.Module) weights.
                unwrapped_model = accelerator.unwrap_model(model)
                accelerator.save(unwrapped_model.state_dict(), ckp)
                Logger(f"Model saved to {ckp}", accelerator)

        except Exception as e:
            # Best-effort: log and continue with the next step.
            Logger(f"Error in training step: {e}", accelerator)
            import traceback
            Logger(traceback.format_exc(), accelerator)
def main():
    """Entry point: parse CLI args, build model/data, and train with Accelerate+DeepSpeed."""
    parser = argparse.ArgumentParser(description="MiniMind Pretraining with Accelerate")
    parser.add_argument("--out_dir", type=str, default="out")
    parser.add_argument("--epochs", type=int, default=3)
    parser.add_argument("--batch_size", type=int, default=24)
    parser.add_argument("--learning_rate", type=float, default=2e-4)
    parser.add_argument("--dtype", type=str, default="bfloat16")
    # NOTE(review): default=True with store_true means this can never be
    # turned off from the CLI.
    parser.add_argument("--use_wandb", default=True, action="store_true")
    parser.add_argument("--wandb_project", type=str, default="MiniMind-Pretrain")
    parser.add_argument("--num_workers", type=int, default=48)
    parser.add_argument("--accumulation_steps", type=int, default=32)
    parser.add_argument("--grad_clip", type=float, default=1.0)
    parser.add_argument("--warmup_iters", type=int, default=0)
    parser.add_argument("--log_interval", type=int, default=100)
    parser.add_argument("--save_interval", type=int, default=10000)
    parser.add_argument('--dim', default=1024, type=int)
    parser.add_argument('--n_layers', default=32, type=int)
    parser.add_argument('--max_seq_len', default=1024, type=int)
    # FIX: type=bool treats any non-empty string (including "False") as True;
    # parse the text explicitly so "--use_moe False" works as expected.
    parser.add_argument('--use_moe', default=False,
                        type=lambda v: str(v).lower() in ("1", "true", "yes"))
    parser.add_argument('--disable_db', action='store_true', help="禁用数据库功能,使用固定值1e-4替代")
    parser.add_argument("--data_path", type=str, default="./dataset/pretrain_hq.jsonl")
    parser.add_argument("--pretrained_embedding_path", type=str, default=None, help="Path to pretrained token embedding weights (.pth file)")
    parser.add_argument("--profile", action="store_true", default=True, help="启用性能分析")
    parser.add_argument("--profile_interval", type=int, default=10, help="性能分析打印间隔(步数)")
    parser.add_argument("--use_flash_attn", action="store_true", default=True, help="启用FlashAttention")
    # NOTE(review): "knowlwdge" is a typo, but it is part of the public CLI
    # and the LMConfig keyword interface, so it is kept verbatim.
    parser.add_argument("--knowlwdge_num", type=int, default=64*64, help="知识库的数据数目")
    parser.add_argument("--knowlwdge_length", type=int, default=8, help="知识库的句子长度")
    args = parser.parse_args()

    # ---- Accelerator / DeepSpeed setup ----
    # find_unused_parameters=True because the model has genuinely unused params.
    ddp_kwargs = DistributedDataParallelKwargs(find_unused_parameters=True)
    ds_plugin = DeepSpeedPlugin(
        gradient_accumulation_steps=args.accumulation_steps,
        gradient_clipping=args.grad_clip,
        zero_stage=2,                      # ZeRO-2 optimizer-state sharding
        offload_optimizer_device="cpu",    # offload optimizer state to CPU
        offload_param_device="none",       # keep parameters on GPU
    )
    accelerator = Accelerator(
        kwargs_handlers=[ddp_kwargs],
        deepspeed_plugin=ds_plugin,
        mixed_precision="bf16" if args.dtype == "bfloat16" else "fp16" if args.dtype == "float16" else "no"
    )

    # Per-process seed for reproducibility with rank-dependent randomness.
    set_seed(1337 + accelerator.process_index)

    # ---- model configuration ----
    lm_config = LMConfig(
        dim=args.dim,
        n_layers=args.n_layers,
        max_seq_len=args.max_seq_len,
        use_moe=args.use_moe,
        disable_db=args.disable_db,
        flash_attn=args.use_flash_attn,
        knowlwdge_num=args.knowlwdge_num,
        knowlwdge_length=args.knowlwdge_length
    )

    # ---- output directories (main process only) ----
    args.save_dir = os.path.join(args.out_dir)
    if accelerator.is_main_process:
        os.makedirs(args.save_dir, exist_ok=True)
        os.makedirs(args.out_dir, exist_ok=True)

    pt_dtype = {'float32': torch.float32, 'bfloat16': torch.bfloat16, 'float16': torch.float16}[args.dtype]

    # ---- wandb / run configuration ----
    args.wandb_run_name = f"MiniMind-Pretrain-Epoch-{args.epochs}-BatchSize-{args.batch_size}-LearningRate-{args.learning_rate}"
    # FIX: build the merged config dict unconditionally — it is also read by
    # the configuration dump below, which previously raised NameError whenever
    # the wandb branch was not taken.
    config_dict = vars(args).copy()
    config_dict.update(vars(lm_config))
    if args.use_wandb and accelerator.is_main_process:
        import wandb
        wandb.init(project=args.wandb_project, name=args.wandb_run_name, config=config_dict)
    else:
        wandb = None

    # ---- info dump ----
    tokens_per_iter = args.batch_size * lm_config.max_seq_len
    if accelerator.is_main_process:
        Logger(f"tokens_per_iter: {tokens_per_iter}", accelerator)
        Logger("Configuration:", accelerator)
        for key, value in config_dict.items():
            Logger(f"  {key}: {value}", accelerator)

    # ---- autocast context ----
    # NOTE(review): accelerator already applies mixed precision; this extra
    # autocast context is kept for parity with the original — confirm it is
    # not redundant for your DeepSpeed config.
    ctx = nullcontext() if accelerator.device.type == "cpu" else torch.cuda.amp.autocast(dtype=pt_dtype)

    # ---- model & tokenizer ----
    model, tokenizer = init_model(lm_config, args.pretrained_embedding_path)
    Logger(f'模型初始化完成', accelerator)

    # ---- positional-encoding buffer handling for DDP ----
    if hasattr(model, "pos_cis_real"):
        Logger(f'检测到pos_cis_real实数张量,将其设置为参与分布式训练', accelerator)
        # real-valued buffer participates in distributed training; nothing to ignore
    elif hasattr(model, "pos_cis"):
        Logger(f'检测到pos_cis复数张量,将其设置为不参与分布式训练', accelerator)
        # complex buffer must be excluded from DDP broadcast
        model._ddp_params_and_buffers_to_ignore = {"pos_cis"}

    # ---- data ----
    train_ds = PretrainDataset(args.data_path, tokenizer, max_length=lm_config.max_seq_len)
    train_loader = DataLoader(
        train_ds,
        batch_size=args.batch_size,
        pin_memory=True,
        drop_last=False,
        shuffle=True,
        num_workers=args.num_workers,
        persistent_workers=True if args.num_workers > 0 else False,
        prefetch_factor=2 if args.num_workers > 0 else None
    )

    # ---- optimizer & LR schedule ----
    optimizer = optim.AdamW(model.parameters(), lr=args.learning_rate)
    total_steps = len(train_loader) * args.epochs
    # Default warmup: 10% of total steps when --warmup_iters is not given.
    warmup_steps = args.warmup_iters if args.warmup_iters > 0 else int(0.1 * total_steps)
    scheduler = get_cosine_schedule_with_warmup(
        optimizer,
        num_warmup_steps=warmup_steps,
        num_training_steps=total_steps
    )

    # ---- wrap everything for distributed execution ----
    model, optimizer, train_loader, scheduler = accelerator.prepare(
        model, optimizer, train_loader, scheduler
    )

    # ---- training loop ----
    overall_start_time = time.time()
    for epoch in range(args.epochs):
        train_epoch(epoch, accelerator, model, train_loader, optimizer, scheduler, args, ctx, overall_start_time)

    # ---- teardown ----
    if args.use_wandb and accelerator.is_main_process:
        wandb.finish()


if __name__ == "__main__":
    main()