{"id":71,"date":"2024-07-21T17:32:50","date_gmt":"2024-07-21T09:32:50","guid":{"rendered":"https:\/\/www.onetts.com\/ai\/?p=71"},"modified":"2024-07-21T17:34:07","modified_gmt":"2024-07-21T09:34:07","slug":"qwen1-5-110b","status":"publish","type":"post","link":"https:\/\/www.onetts.com\/ai\/models\/qwen1-5-110b\/","title":{"rendered":"Qwen1.5-110B"},"content":{"rendered":"<p data-spm-anchor-id=\"a2c6h.13066512.0.i3.763536afF0pdEa\">Qwen1.5-110B\u662f\u7531\u901a\u4e49\u5343\u95ee\u56e2\u961f\u5f00\u53d1\u5e76\u5f00\u6e90\u7684\u5343\u4ebf\u53c2\u6570\u89c4\u6a21\u7684\u8bed\u8a00\u6a21\u578b\u3002\u5b83\u5c5e\u4e8eQwen1.5\u7cfb\u5217\uff0c\u662f\u8be5\u7cfb\u5217\u4e2d\u9996\u4e2a\u8fbe\u52301100\u4ebf\u53c2\u6570\u7684\u6a21\u578b\u3002Qwen1.5-110B\u91c7\u7528\u4e86\u5148\u8fdb\u7684Transformer\u89e3\u7801\u5668\u67b6\u6784\uff0c\u5e76\u5f15\u5165\u4e86\u5206\u7ec4\u67e5\u8be2\u6ce8\u610f\u529b\uff08Grouped Query Attention, GQA\uff09\uff0c\u8fd9\u4f7f\u5f97\u6a21\u578b\u5728\u63a8\u7406\u65f6\u66f4\u52a0\u9ad8\u6548\u3002\u6a21\u578b\u652f\u6301\u591a\u8bed\u8a00\u5904\u7406\uff0c\u6db5\u76d6\u82f1\u8bed\u3001\u4e2d\u6587\u3001\u6cd5\u8bed\u3001\u897f\u73ed\u7259\u8bed\u3001\u5fb7\u8bed\u3001\u4fc4\u8bed\u3001\u65e5\u8bed\u3001\u97e9\u8bed\u3001\u8d8a\u5357\u8bed\u548c\u963f\u62c9\u4f2f\u8bed\u7b49\u591a\u79cd\u8bed\u8a00\uff0c\u80fd\u591f\u5904\u7406\u957f\u8fbe32K tokens\u7684\u4e0a\u4e0b\u6587\u3002<\/p>\n<p>Qwen1.5-110B\u5728\u57fa\u7840\u80fd\u529b\u8bc4\u4f30\u4e2d\u8868\u73b0\u51fa\u8272\uff0c\u4e0eMeta-Llama3-70B\u76f8\u5ab2\u7f8e\u3002\u5728Chat\u8bc4\u4f30\u4e2d\uff0c\u5b83\u5728MT-Bench\u548cAlpacaEval 2.0\u4e0a\u7684\u8868\u73b0\u4e5f\u975e\u5e38\u7a81\u51fa\u3002\u6a21\u578b\u7684\u591a\u8bed\u8a00\u80fd\u529b\u548c\u9ad8\u6548\u7684\u63a8\u7406\u673a\u5236\u4f7f\u5176\u5728\u81ea\u7136\u8bed\u8a00\u5904\u7406\u9886\u57df\u5177\u6709\u5e7f\u6cdb\u7684\u5e94\u7528\u524d\u666f\u3002<\/p>\n<h2 
id=\"-\">\u6a21\u578b\u8bc4\u6d4b<\/h2>\n<p>Qwen1.5-110B\u5728\u591a\u9879\u8bc4\u6d4b\u4e2d\u90fd\u53d6\u5f97\u4e86\u663e\u8457\u7684\u6210\u7ee9\u3002\u5728\u57fa\u7840\u80fd\u529b\u8bc4\u4f30\u4e2d\uff0c\u5b83\u4e0eMeta-Llama3-70B\u76f8\u6bd4\uff0c\u663e\u793a\u51fa\u4e86\u4e0d\u76f8\u4e0a\u4e0b\u7684\u6027\u80fd\u3002\u8fd9\u8868\u660eQwen1.5-110B\u5728\u7406\u89e3\u548c\u751f\u6210\u81ea\u7136\u8bed\u8a00\u65b9\u9762\u5177\u6709\u6781\u9ad8\u7684\u80fd\u529b\u3002\u7279\u522b\u662f\u5728\u591a\u8bed\u8a00\u5904\u7406\u548c\u957f\u6587\u672c\u7406\u89e3\u65b9\u9762\uff0cQwen1.5-110B\u5c55\u73b0\u51fa\u4e86\u5353\u8d8a\u7684\u6027\u80fd\u3002<\/p>\n<p>\u5728Chat\u8bc4\u4f30\u4e2d\uff0cQwen1.5-110B\u7684\u8868\u73b0\u5c24\u4e3a\u51fa\u8272\u3002\u5b83\u5728MT-Bench\u548cAlpacaEval 2.0\u7684\u8bc4\u4f30\u4e2d\uff0c\u4e0d\u4ec5\u5728\u5bf9\u8bdd\u6d41\u7545\u6027\u3001\u903b\u8f91\u8fde\u8d2f\u6027\u7b49\u65b9\u9762\u8868\u73b0\u4f18\u5f02\uff0c\u8fd8\u5728\u591a\u8f6e\u5bf9\u8bdd\u4e2d\u4fdd\u6301\u4e86\u8f83\u9ad8\u7684\u4e00\u81f4\u6027\u548c\u51c6\u786e\u6027\u3002\u8fd9\u4f7f\u5f97Qwen1.5-110B\u5728\u804a\u5929\u673a\u5668\u4eba\u3001\u667a\u80fd\u5ba2\u670d\u7b49\u9886\u57df\u5177\u6709\u6781\u5927\u7684\u5e94\u7528\u6f5c\u529b\u3002<\/p>\n<p>\u6a21\u578b\u7684\u8bc4\u6d4b\u7ed3\u679c\u8868\u660e\uff0cQwen1.5-110B\u5728\u6ca1\u6709\u5927\u5e45\u6539\u53d8\u9884\u8bad\u7ec3\u65b9\u6cd5\u7684\u60c5\u51b5\u4e0b\uff0c\u901a\u8fc7\u589e\u52a0\u6a21\u578b\u89c4\u6a21\uff0c\u663e\u8457\u63d0\u5347\u4e86\u6027\u80fd\u3002\u8fd9\u4e3a\u672a\u6765\u6a21\u578b\u89c4\u6a21\u6269\u5c55\u548c\u6570\u636e\u9884\u8bad\u7ec3\u63d0\u4f9b\u4e86\u91cd\u8981\u7684\u53c2\u8003\u3002<\/p>\n<h2 
id=\"-\">\u90e8\u7f72\u4f7f\u7528<\/h2>\n<p>\u90e8\u7f72Qwen1.5-110B\u6a21\u578b\u9700\u8981\u7ecf\u8fc7\u51e0\u4e2a\u5173\u952e\u6b65\u9aa4\u3002\u4ee5\u4e0b\u662f\u8be6\u7ec6\u7684\u90e8\u7f72\u6b65\u9aa4\uff1a<\/p>\n<ol>\n<li><strong>\u73af\u5883\u51c6\u5907<\/strong>\uff1a\n<ul>\n<li>\u786e\u4fdd\u7cfb\u7edf\u5b89\u88c5\u4e86Python\u73af\u5883\uff0c\u63a8\u8350\u4f7f\u7528Python 3.6\u53ca\u4ee5\u4e0a\u7248\u672c\u3002<\/li>\n<li>\u5b89\u88c5\u5fc5\u8981\u7684\u5e93\uff0c\u5982PyTorch\u3001Transformers\u7b49\u3002<\/li>\n<\/ul>\n<\/li>\n<li><strong>\u6a21\u578b\u4e0b\u8f7d<\/strong>\uff1a\n<ul>\n<li>\u8bbf\u95eeModelScope\u5e73\u53f0\uff0c\u4e0b\u8f7dQwen1.5-110B\u6a21\u578b\u3002\u53ef\u4ee5\u901a\u8fc7\u4ee5\u4e0b\u4ee3\u7801\u5b9e\u73b0\uff1a\n<pre><code class=\"lang-python\"><span class=\"token keyword\">from<\/span> modelscope <span class=\"token keyword\">import<\/span> snapshot_download\r\nmodel_dir <span class=\"token operator\">=<\/span> snapshot_download<span class=\"token punctuation\">(<\/span><span class=\"token string\">'qwen\/Qwen1.5-110B-Chat'<\/span><span class=\"token punctuation\">)<\/span>\r\n<\/code><\/pre>\n<\/li>\n<\/ul>\n<\/li>\n<li><strong>\u6a21\u578b\u52a0\u8f7d<\/strong>\uff1a\n<ul>\n<li>\u4f7f\u7528AutoModelForCausalLM\u548cAutoTokenizer\u52a0\u8f7d\u6a21\u578b\uff1a\n<pre><code class=\"lang-python\"><span class=\"token keyword\">from<\/span> modelscope <span class=\"token keyword\">import<\/span> AutoModelForCausalLM<span class=\"token punctuation\">,<\/span> AutoTokenizer\r\ndevice <span class=\"token operator\">=<\/span> <span class=\"token string\">\"cuda\"<\/span>  <span class=\"token comment\" spellcheck=\"true\"># the device to load the model onto<\/span>\r\nmodel <span class=\"token operator\">=<\/span> AutoModelForCausalLM<span class=\"token punctuation\">.<\/span>from_pretrained<span class=\"token punctuation\">(<\/span>\r\n    <span class=\"token string\">\"qwen\/Qwen1.5-110B-Chat\"<\/span><span class=\"token 
punctuation\">,<\/span>\r\n    torch_dtype<span class=\"token operator\">=<\/span><span class=\"token string\">\"auto\"<\/span><span class=\"token punctuation\">,<\/span>\r\n    device_map<span class=\"token operator\">=<\/span><span class=\"token string\">\"auto\"<\/span>\r\n<span class=\"token punctuation\">)<\/span>\r\ntokenizer <span class=\"token operator\">=<\/span> AutoTokenizer<span class=\"token punctuation\">.<\/span>from_pretrained<span class=\"token punctuation\">(<\/span><span class=\"token string\">\"qwen\/Qwen1.5-110B-Chat\"<\/span><span class=\"token punctuation\">)<\/span>\r\n<\/code><\/pre>\n<\/li>\n<\/ul>\n<\/li>\n<li><strong>\u6a21\u578b\u63a8\u7406<\/strong>\uff1a\n<ul>\n<li>\u7f16\u5199\u63a8\u7406\u4ee3\u7801\uff0c\u751f\u6210\u6587\u672c\uff1a\n<pre><code class=\"lang-python\">prompt <span class=\"token operator\">=<\/span> <span class=\"token string\">\"Give me a short introduction to large language model.\"<\/span>\r\nmessages <span class=\"token operator\">=<\/span> <span class=\"token punctuation\">[<\/span>\r\n    <span class=\"token punctuation\">{<\/span><span class=\"token string\">\"role\"<\/span><span class=\"token punctuation\">:<\/span> <span class=\"token string\">\"system\"<\/span><span class=\"token punctuation\">,<\/span> <span class=\"token string\">\"content\"<\/span><span class=\"token punctuation\">:<\/span> <span class=\"token string\">\"You are a helpful assistant.\"<\/span><span class=\"token punctuation\">}<\/span><span class=\"token punctuation\">,<\/span>\r\n    <span class=\"token punctuation\">{<\/span><span class=\"token string\">\"role\"<\/span><span class=\"token punctuation\">:<\/span> <span class=\"token string\">\"user\"<\/span><span class=\"token punctuation\">,<\/span> <span class=\"token string\">\"content\"<\/span><span class=\"token punctuation\">:<\/span> prompt<span class=\"token punctuation\">}<\/span>\r\n<span class=\"token punctuation\">]<\/span>\r\ntext <span class=\"token operator\">=<\/span> 
tokenizer<span class=\"token punctuation\">.<\/span>apply_chat_template<span class=\"token punctuation\">(<\/span>\r\n    messages<span class=\"token punctuation\">,<\/span>\r\n    tokenize<span class=\"token operator\">=<\/span><span class=\"token boolean\">False<\/span><span class=\"token punctuation\">,<\/span>\r\n    add_generation_prompt<span class=\"token operator\">=<\/span><span class=\"token boolean\">True<\/span>\r\n<span class=\"token punctuation\">)<\/span>\r\nmodel_inputs <span class=\"token operator\">=<\/span> tokenizer<span class=\"token punctuation\">(<\/span><span class=\"token punctuation\">[<\/span>text<span class=\"token punctuation\">]<\/span><span class=\"token punctuation\">,<\/span> return_tensors<span class=\"token operator\">=<\/span><span class=\"token string\">\"pt\"<\/span><span class=\"token punctuation\">)<\/span><span class=\"token punctuation\">.<\/span>to<span class=\"token punctuation\">(<\/span>device<span class=\"token punctuation\">)<\/span>\r\ngenerated_ids <span class=\"token operator\">=<\/span> model<span class=\"token punctuation\">.<\/span>generate<span class=\"token punctuation\">(<\/span>\r\n    model_inputs<span class=\"token punctuation\">.<\/span>input_ids<span class=\"token punctuation\">,<\/span>\r\n    max_new_tokens<span class=\"token operator\">=<\/span><span class=\"token number\">512<\/span>\r\n<span class=\"token punctuation\">)<\/span>\r\ngenerated_ids <span class=\"token operator\">=<\/span> <span class=\"token punctuation\">[<\/span>\r\n    output_ids<span class=\"token punctuation\">[<\/span>len<span class=\"token punctuation\">(<\/span>input_ids<span class=\"token punctuation\">)<\/span><span class=\"token punctuation\">:<\/span><span class=\"token punctuation\">]<\/span> <span class=\"token keyword\">for<\/span> input_ids<span class=\"token punctuation\">,<\/span> output_ids <span class=\"token keyword\">in<\/span> zip<span class=\"token punctuation\">(<\/span>model_inputs<span class=\"token 
punctuation\">.<\/span>input_ids<span class=\"token punctuation\">,<\/span> generated_ids<span class=\"token punctuation\">)<\/span>\r\n<span class=\"token punctuation\">]<\/span>\r\nresponse <span class=\"token operator\">=<\/span> tokenizer<span class=\"token punctuation\">.<\/span>batch_decode<span class=\"token punctuation\">(<\/span>generated_ids<span class=\"token punctuation\">,<\/span> skip_special_tokens<span class=\"token operator\">=<\/span><span class=\"token boolean\">True<\/span><span class=\"token punctuation\">)<\/span><span class=\"token punctuation\">[<\/span><span class=\"token number\">0<\/span><span class=\"token punctuation\">]<\/span>\r\n<\/code><\/pre>\n<\/li>\n<\/ul>\n<\/li>\n<li><strong>\u663e\u5b58\u8981\u6c42<\/strong>\uff1a\n<ul>\n<li>\u6a21\u578b\u63a8\u7406\u9700\u8981\u8f83\u9ad8\u7684\u663e\u5b58\uff0c\u63a8\u8350\u4f7f\u75284\u5361A100\uff0c\u5171\u7ea6230G\u663e\u5b58\u3002<\/li>\n<\/ul>\n<\/li>\n<li><strong>\u6a21\u578b\u5fae\u8c03<\/strong>\uff1a\n<ul>\n<li>\u4f7f\u7528\u9b54\u642d\u793e\u533a\u7684\u5fae\u8c03\u6846\u67b6SWIFT\u8fdb\u884c\u6a21\u578b\u5fae\u8c03\uff0c\u652f\u6301Qwen1.5\u5168\u7cfb\u5217\u6a21\u578b\u3002<\/li>\n<\/ul>\n<\/li>\n<\/ol>\n<h2 id=\"-\">\u5e38\u89c1\u95ee\u9898<\/h2>\n<h4 id=\"1-qwen1-5-110b-\">1. \u5982\u4f55\u4e0b\u8f7dQwen1.5-110B\u6a21\u578b\uff1f<\/h4>\n<p>\u53ef\u4ee5\u901a\u8fc7ModelScope\u5e73\u53f0\u4e0b\u8f7dQwen1.5-110B\u6a21\u578b\u3002\u4ee5\u4e0b\u662f\u4e0b\u8f7d\u4ee3\u7801\u793a\u4f8b\uff1a<\/p>\n<pre><code class=\"lang-python\"><span class=\"token keyword\">from<\/span> modelscope <span class=\"token keyword\">import<\/span> snapshot_download\r\nmodel_dir <span class=\"token operator\">=<\/span> snapshot_download<span class=\"token punctuation\">(<\/span><span class=\"token string\">'qwen\/Qwen1.5-110B-Chat'<\/span><span class=\"token punctuation\">)<\/span>\r\n<\/code><\/pre>\n<h4 id=\"2-\">2. 
\u6a21\u578b\u63a8\u7406\u65f6\u663e\u5b58\u4e0d\u8db3\u600e\u4e48\u529e\uff1f<\/h4>\n<p>\u5982\u679c\u663e\u5b58\u4e0d\u8db3\uff0c\u53ef\u4ee5\u5c1d\u8bd5\u4ee5\u4e0b\u65b9\u6cd5\uff1a<\/p>\n<ul>\n<li>\u964d\u4f4e\u6a21\u578b\u7684batch size\u3002<\/li>\n<li>\u4f7f\u7528\u66f4\u5c0f\u7684\u6a21\u578b\u7248\u672c\u3002<\/li>\n<li>\u4f18\u5316\u6a21\u578b\u7684\u8f93\u5165\u6570\u636e\uff0c\u51cf\u5c11\u8f93\u5165\u957f\u5ea6\u3002<\/li>\n<\/ul>\n<h4 id=\"3-\">3. \u5982\u4f55\u8fdb\u884c\u6a21\u578b\u5fae\u8c03\uff1f<\/h4>\n<p>\u53ef\u4ee5\u4f7f\u7528\u9b54\u642d\u793e\u533a\u7684\u5fae\u8c03\u6846\u67b6SWIFT\u8fdb\u884c\u6a21\u578b\u5fae\u8c03\u3002\u4ee5\u4e0b\u662f\u8bad\u7ec3\u53c2\u6570\u914d\u7f6e\u793a\u4f8b\uff1a<\/p>\n<pre><code class=\"lang-bash\">nproc_per_node<span class=\"token operator\">=<\/span>4\r\nCUDA_VISIBLE_DEVICES<span class=\"token operator\">=<\/span>0,1,2,3 \\\r\nNPROC_PER_NODE<span class=\"token operator\">=<\/span><span class=\"token variable\">$nproc_per_node<\/span> \\\r\nswift sft \\\r\n    --model_type qwen1half-110b-chat \\\r\n    --sft_type lora \\\r\n    --tuner_backend peft \\\r\n    --dtype AUTO \\\r\n    --output_dir output \\\r\n    --ddp_backend nccl \\\r\n    --num_train_epochs 2 \\\r\n    --max_length 2048 \\\r\n    --check_dataset_strategy warning \\\r\n    --lora_rank 8 \\\r\n    --lora_alpha 32 \\\r\n    --lora_dropout_p 0.05 \\\r\n    --lora_target_modules ALL \\\r\n    --gradient_checkpointing <span class=\"token boolean\">true<\/span> \\\r\n    --batch_size 1 \\\r\n    --weight_decay 0.1 \\\r\n    --learning_rate 1e-4 \\\r\n    --gradient_accumulation_steps <span class=\"token variable\">$(<span class=\"token function\">expr<\/span> 16 \/ $nproc_per_node)<\/span> \\\r\n    --max_grad_norm 0.5 \\\r\n    --warmup_ratio 0.03 \\\r\n    --eval_steps 100 \\\r\n    --save_steps 100 \\\r\n    --save_total_limit 2 \\\r\n    --logging_steps 10 \\\r\n    --use_flash_attn <span class=\"token boolean\">true<\/span> \\\r\n    
--deepspeed default-zero3 \\\r\n    --self_cognition_sample 2000 \\\r\n    --model_name \u5c0f\u767d <span class=\"token string\">'Xiao Bai'<\/span> \\\r\n    --model_author \u9b54\u642d ModelScope\r\n<\/code><\/pre>\n<h2 id=\"-\">\u76f8\u5173\u8d44\u6e90<\/h2>\n<p>\u4ee5\u4e0b\u662f\u4e00\u4e9b\u53ef\u4ee5\u8bbf\u95ee\u7684\u76f8\u5173\u8d44\u6e90\u94fe\u63a5\uff1a<\/p>\n<ul>\n<li><strong>\u6a21\u578b\u4f53\u9a8c\u94fe\u63a5<\/strong>\uff1a\n<ul>\n<li><a target=\"_blank\" rel=\"noopener noreferrer nofollow\" href=\"https:\/\/modelscope.cn\/studios\/qwen\/Qwen1.5-110B-Chat-demo\">Qwen1.5-110B-Chat-demo<\/a><\/li>\n<li><a target=\"_blank\" rel=\"noopener noreferrer nofollow\" href=\"https:\/\/www.modelscope.cn\/studios\/LLM-Research\/Llama3-Qwen1.5-Arena\">Llama3-Qwen1.5-Arena<\/a><\/li>\n<\/ul>\n<\/li>\n<li><strong>\u6a21\u578b\u4e0b\u8f7d\u94fe\u63a5<\/strong>\uff1a\n<ul>\n<li><a target=\"_blank\" rel=\"noopener noreferrer nofollow\" href=\"https:\/\/www.modelscope.cn\/models\/qwen\/Qwen1.5-110B-Chat\">Qwen1.5-110B-Chat<\/a><\/li>\n<li><a target=\"_blank\" rel=\"noopener noreferrer nofollow\" href=\"https:\/\/www.modelscope.cn\/models\/qwen\/Qwen1.5-110B\">Qwen1.5-110B<\/a><\/li>\n<\/ul>\n<\/li>\n<li><strong>\u9b54\u642d\u793e\u533a<\/strong>\uff1a\n<ul>\n<li><a target=\"_blank\" rel=\"noopener noreferrer nofollow\" 
href=\"https:\/\/modelscope.cn\/\">\u9b54\u642d\u793e\u533a<\/a><\/li>\n<\/ul>\n<\/li>\n<\/ul>\n<p>\u901a\u8fc7\u8fd9\u4e9b\u8d44\u6e90\uff0c\u7528\u6237\u53ef\u4ee5\u66f4\u6df1\u5165\u5730\u4e86\u89e3Qwen1.5-110B\u6a21\u578b\uff0c\u5e76\u8fdb\u884c\u76f8\u5173\u7684\u6a21\u578b\u4f53\u9a8c\u3001\u4e0b\u8f7d\u548c\u8bad\u7ec3\u3002<\/p>\n","protected":false},"excerpt":{"rendered":"<p>Qwen1.5-110B\u662f\u7531\u901a\u4e49\u5343\u95ee\u56e2\u961f\u5f00\u53d1\u5e76\u5f00\u6e90\u7684\u5343\u4ebf\u53c2\u6570\u89c4\u6a21\u7684\u8bed\u8a00\u6a21\u578b\u3002\u5b83\u5c5e\u4e8eQwen1.5\u7cfb\u5217\uff0c\u662f\u8be5\u7cfb\u5217\u4e2d\u9996\u4e2a\u8fbe\u52301100\u4ebf\u53c2\u6570\u7684\u6a21\u578b\u3002Qwen1.5-110B\u91c7\u7528\u4e86\u5148\u8fdb\u7684Transformer\u89e3\u7801\u5668\u67b6\u6784\uff0c\u5e76\u5f15\u5165\u4e86\u5206\u7ec4\u67e5\u8be2\u6ce8\u610f\u529b\uff08Grouped Query Attention, GQA\uff09\uff0c\u8fd9\u4f7f\u5f97\u6a21\u578b\u5728\u63a8\u7406\u65f6\u66f4\u52a0\u9ad8\u6548\u3002\u6a21\u578b\u652f\u6301\u591a\u8bed\u8a00\u5904\u7406\uff0c\u6db5\u76d6\u82f1\u8bed\u3001\u4e2d\u6587\u3001\u6cd5\u8bed\u3001\u897f\u73ed\u7259\u8bed\u3001\u5fb7\u8bed\u3001\u4fc4\u8bed\u3001\u65e5\u8bed\u3001\u97e9\u8bed\u3001\u8d8a\u5357\u8bed\u548c\u963f\u62c9\u4f2f\u8bed\u7b49\u591a\u79cd\u8bed\u8a00\uff0c\u80fd\u591f\u5904\u7406\u957f\u8fbe32K tokens\u7684\u4e0a\u4e0b\u6587\u3002 
Qwen1.5-110B\u5728\u57fa\u7840\u80fd\u529b\u8bc4\u4f30\u4e2d\u8868\u73b0\u51fa\u8272<\/p>\n","protected":false},"author":1,"featured_media":73,"comment_status":"open","ping_status":"closed","sticky":false,"template":"","format":"standard","meta":{"footnotes":""},"categories":[3],"tags":[],"collection":[42],"company":[6],"rank":[],"class_list":["post-71","post","type-post","status-publish","format-standard","has-post-thumbnail","hentry","category-llm","collection-qwen1-5","company-alibaba"],"_links":{"self":[{"href":"https:\/\/www.onetts.com\/ai\/wp-json\/wp\/v2\/posts\/71","targetHints":{"allow":["GET"]}}],"collection":[{"href":"https:\/\/www.onetts.com\/ai\/wp-json\/wp\/v2\/posts"}],"about":[{"href":"https:\/\/www.onetts.com\/ai\/wp-json\/wp\/v2\/types\/post"}],"author":[{"embeddable":true,"href":"https:\/\/www.onetts.com\/ai\/wp-json\/wp\/v2\/users\/1"}],"replies":[{"embeddable":true,"href":"https:\/\/www.onetts.com\/ai\/wp-json\/wp\/v2\/comments?post=71"}],"version-history":[{"count":2,"href":"https:\/\/www.onetts.com\/ai\/wp-json\/wp\/v2\/posts\/71\/revisions"}],"predecessor-version":[{"id":74,"href":"https:\/\/www.onetts.com\/ai\/wp-json\/wp\/v2\/posts\/71\/revisions\/74"}],"wp:featuredmedia":[{"embeddable":true,"href":"https:\/\/www.onetts.com\/ai\/wp-json\/wp\/v2\/media\/73"}],"wp:attachment":[{"href":"https:\/\/www.onetts.com\/ai\/wp-json\/wp\/v2\/media?parent=71"}],"wp:term":[{"taxonomy":"category","embeddable":true,"href":"https:\/\/www.onetts.com\/ai\/wp-json\/wp\/v2\/categories?post=71"},{"taxonomy":"post_tag","embeddable":true,"href":"https:\/\/www.onetts.com\/ai\/wp-json\/wp\/v2\/tags?post=71"},{"taxonomy":"collection","embeddable":true,"href":"https:\/\/www.onetts.com\/ai\/wp-json\/wp\/v2\/collection?post=71"},{"taxonomy":"company","embeddable":true,"href":"https:\/\/www.onetts.com\/ai\/wp-json\/wp\/v2\/company?post=71"},{"taxonomy":"rank","embeddable":true,"href":"https:\/\/www.onetts.com\/ai\/wp-json\/wp\/v2\/rank?post=71"}],"curies":[{"name":"wp","
href":"https:\/\/api.w.org\/{rel}","templated":true}]}}