From 22f370322412074174cde20ecfd14ec03657ab63 Mon Sep 17 00:00:00 2001
From: lyg <1543117173@qq.com>
Date: 星期一, 07 七月 2025 16:20:25 +0800
Subject: [PATCH] 生成数据库

---
 db_struct_flow.py | 1584 ++++++++++++++++++++++++++++++++++++++++-----------------
 1 file changed, 1102 insertions(+), 482 deletions(-)

diff --git a/db_struct_flow.py b/db_struct_flow.py
index bf2817a..34f7e77 100644
--- a/db_struct_flow.py
+++ b/db_struct_flow.py
@@ -1,4 +1,7 @@
+import asyncio
+import math
 import os
+import subprocess
 import time
 from datetime import datetime
 
@@ -6,93 +9,58 @@
 import re
 import json
 
+from langchain_community.chat_models import ChatOpenAI
+from langchain_core.prompts import HumanMessagePromptTemplate, SystemMessagePromptTemplate
+import textwrap
 import data_templates
+from knowledgebase import utils
 from knowledgebase.db.db_helper import create_project, create_device, create_data_stream, \
-    update_rule_enc, create_extend_info, create_ref_ds_rule_stream, create_ins_format
+    update_rule_enc, create_extend_info, create_ref_ds_rule_stream, create_ins_format, make_attr, init_db_helper
 from knowledgebase.db.data_creator import create_prop_enc, create_enc_pkt, get_data_ty, create_any_pkt
 
-from knowledgebase.db.models import TProject
+from knowledgebase.db.models import TProject, init_base_db
 
-file_map = {
-    "鏂囨。鍚堝苟": "./doc/鏂囨。鍚堝苟.md",
-    "閬ユ祴婧愬寘璁捐鎶ュ憡": "./doc/XA-5D鏃犱汉鏈哄垎绯荤粺鎺㈡祴婧愬寘璁捐鎶ュ憡锛堝叕寮�锛�.md",
-    "閬ユ祴澶х翰": "./doc/XA-5D鏃犱汉鏈烘帰娴嬪ぇ绾诧紙鍏紑锛�.md",
-    "鎬荤嚎浼犺緭閫氫俊甯у垎閰�": "./doc/XA-5D鏃犱汉鏈�1314A鎬荤嚎浼犺緭閫氫俊甯у垎閰嶏紙鍏紑锛�.md",
-    "搴旂敤杞欢鐢ㄦ埛闇�姹�": "./doc/XA-5D鏃犱汉鏈鸿蒋浠剁敤鎴烽渶姹傦紙鍏紑锛�.docx.md",
-    "鎸囦护鏍煎紡": "./doc/ZL鏍煎紡(鍏紑).docx.md"
-}
-# file_map = {
-#     "閬ユ祴婧愬寘璁捐鎶ュ憡": "./docs/HY-4A鏁扮鍒嗙郴缁熼仴娴嬫簮鍖呰璁℃姤鍛� Z 240824 鏇存敼3(鍐呴儴) .docx.md",
-#     "閬ユ祴澶х翰": "./docs/HY-4A鍗槦閬ユ祴澶х翰 Z 240824 鏇存敼3锛堝唴閮級.docx.md",
-#     "鎬荤嚎浼犺緭閫氫俊甯у垎閰�": "./docs/HY-4A鍗槦1553B鎬荤嚎浼犺緭閫氫俊甯у垎閰� Z 240824 鏇存敼3锛堝唴閮級.docx.md",
-#     "搴旂敤杞欢鐢ㄦ埛闇�姹�": "./docs/HY-4A鏁扮鍒嗙郴缁熷簲鐢ㄨ蒋浠剁敤鎴烽渶姹傦紙鏄熷姟绠$悊鍒嗗唽锛� Z 240831 鏇存敼4锛堝唴閮級.docx.md"
-# }
-# file_map = {
-#     "鏂囨。鍚堝苟": "./doc/鏂囨。鍚堝苟.md",
-#     "閬ユ祴婧愬寘璁捐鎶ュ憡": "./doc/XA-5D鏃犱汉鏈哄垎绯荤粺鎺㈡祴婧愬寘璁捐鎶ュ憡锛堝叕寮�锛�.md",
-#     "閬ユ祴澶х翰": "./doc/XA-5D鏃犱汉鏈烘帰娴嬪ぇ绾诧紙鍏紑锛�.md",
-#     "鎬荤嚎浼犺緭閫氫俊甯у垎閰�": "./doc/XA-5D鏃犱汉鏈�1314A鎬荤嚎浼犺緭閫氫俊甯у垎閰嶏紙鍏紑锛�.md"
-# }
+from knowledgebase.db.doc_db_helper import doc_dbh
+from knowledgebase.llm import llm
 
-BASE_URL = 'https://dashscope.aliyuncs.com/compatible-mode/v1'
-API_KEY = 'sk-15ecf7e273ad4b729c7f7f42b542749e'
-MODEL_NAME = 'qwen2.5-14b-instruct-1m'
+# BASE_URL = 'https://dashscope.aliyuncs.com/compatible-mode/v1'
+# API_KEY = 'sk-15ecf7e273ad4b729c7f7f42b542749e'
+# MODEL_NAME = 'qwen2.5-72b-instruct'
 
-# BASE_URL = 'http://10.74.15.164:11434/v1/'
+# BASE_URL = 'http://10.74.15.171:11434/v1/'
 # API_KEY = 'ollama'
-# MODEL_NAME = 'qwen2.5:32b-128k'
+# MODEL_NAME = 'qwen2.5:72b-instruct'
 
-# BASE_URL = 'http://10.74.15.164:1001/api'
-# API_KEY = 'sk-a909385bc14d4491a718b6ee264c3227'
-# MODEL_NAME = 'qwen2.5:32b-128k'
+# BASE_URL = 'http://chat.com/api'
+# API_KEY = 'sk-49457e83f734475cb4cf7066c649d563'
+# MODEL_NAME = 'qwen2.5:72b-120k'
+
+BASE_URL = 'http://10.74.15.171:8000/v1'
+API_KEY = 'EMPTY'
+# MODEL_NAME = 'QwQ:32b'
+MODEL_NAME = 'Qwen2.5-72B-Instruct-AWQ'
+# MODEL_NAME = 'qwen2.5:72b-instruct'
 
 USE_CACHE = True
 assistant_msg = """
+浣犳槸涓�鍚嶈祫娣辩殑杞欢宸ョ▼甯堛��
+"""
+#
+# ## 鎶�鑳�
+# ### 鎶�鑳� 1锛氶�氫俊鍗忚鍒嗘瀽
+# 1. 鎺ユ敹閫氫俊鍗忚鐩稿叧淇℃伅锛岀悊瑙e崗璁殑瑙勫垯鍜屾祦绋嬶紝浠呬緷鎹墍缁欎俊鎭繘琛屽垎鏋愩��
+#
+# ## 鐩爣瀵煎悜
+# 1. 閫氳繃瀵规枃妗e拰閫氫俊鍗忚鐨勫垎鏋愶紝涓虹敤鎴锋彁渚涙竻鏅般�佸噯纭殑鏁版嵁缁撴瀯锛屽府鍔╃敤鎴锋洿濂藉湴鐞嗚В鍜屼娇鐢ㄧ浉鍏充俊鎭��
+#
+# ## 闄愬埗锛�
+# - 鎵�杈撳嚭鐨勫唴瀹瑰繀椤绘寜鐓SON鏍煎紡杩涜缁勭粐锛屼笉鑳藉亸绂绘鏋惰姹傦紝涓斾弗鏍奸伒寰枃妗e唴瀹硅繘琛岃緭鍑猴紝鍙緭鍑� JSON 锛屼笉瑕佽緭鍑哄叾瀹冩枃瀛椼��
+# - 涓嶈緭鍑轰换浣曟敞閲婄瓑鎻忚堪鎬т俊鎭��
+tc_system_msg = """
 # 瑙掕壊
-浣犳槸涓�涓笓涓氱殑鏂囨。閫氫俊鍒嗘瀽甯堬紝鎿呴暱杩涜鏂囨。鍒嗘瀽鍜岄�氫俊鍗忚鍒嗘瀽锛屽悓鏃惰兘澶熻В鏋� markdown 绫诲瀷鐨勬枃妗c�傛嫢鏈夋垚鐔熷噯纭殑鏂囨。闃呰涓庡垎鏋愯兘鍔涳紝鑳藉濡ュ杽澶勭悊澶氭枃妗i棿瀛樺湪寮曠敤鍏崇郴鐨勫鏉傛儏鍐点��
-
-## 鎶�鑳�
-### 鎶�鑳� 1锛氭枃妗e垎鏋愶紙鍖呮嫭 markdown 鏂囨。锛�
-1. 褰撶敤鎴锋彁渚涙枃妗f椂锛屼粩缁嗛槄璇绘枃妗e唴瀹癸紝涓ユ牸鎸夌収鏂囨。涓殑鎻忚堪鎻愬彇鍏抽敭淇℃伅锛屼笉寰楀姞鍏ヨ嚜宸辩殑鍥炵瓟鎴栧缓璁��
-2. 鍒嗘瀽鏂囨。鐨勭粨鏋勩�佷富棰樺拰閲嶇偣鍐呭锛屽悓鏍峰彧渚濇嵁鏂囨。杩涜琛ㄨ堪銆�
-3. 濡傛灉鏂囨。闂村瓨鍦ㄥ紩鐢ㄥ叧绯伙紝姊崇悊寮曠敤鑴夌粶锛屾槑纭悇鏂囨。涔嬮棿鐨勫叧鑱旓紝涓斾粎鍛堢幇鏂囨。涓綋鐜扮殑鍐呭銆�
-
-### 鎶�鑳� 2锛氶�氫俊鍗忚鍒嗘瀽
-1. 鎺ユ敹閫氫俊鍗忚鐩稿叧淇℃伅锛岀悊瑙e崗璁殑瑙勫垯鍜屾祦绋嬶紝浠呬緷鎹墍缁欎俊鎭繘琛屽垎鏋愩��
-
-## 鑳屾櫙鐭ヨ瘑
-###杞欢涓昏鍔熻兘涓庤繍琛屾満鍒舵�荤粨濡備笅锛�
-1. 鏁版嵁閲囬泦鍜屽鐞嗭細
-   DIU璐熻矗鏍规嵁鍗槦鐨勫伐浣滅姸鎬佹垨妯″紡鎻愪緵閬ユ祴鏁版嵁锛屽寘鎷ā鎷熼噺锛圓N锛夈�佹�荤嚎淇″彿锛圔L锛変互鍙婃俯搴︼紙TH锛夊拰鏁板瓧閲忥紙DS锛夛紝骞跺皢杩欎簺淇℃伅鎵撳寘锛岄�氳繃鎬荤嚎鍙戦�佺粰SMU銆�
-   SMU鍒欐敹闆嗙‖閫氶亾涓婄殑閬ユ祴鍙傛暟锛屽苟閫氳繃鎬荤嚎鎺ユ敹DIU閲囬泦鐨勪俊鎭��
-2. 澶氳矾澶嶇敤涓庢暟鎹紶杈擄細
-   閬ユ祴婧愬寘琚粍缁囨垚E-PDU锛岃繘涓�姝ュ鐢ㄤ负M-PDU锛屽苟濉厖鍒癡CDU涓瀯鎴愰仴娴嬪抚銆�
-   鍒╃敤CCSDS AOS CADU鏍煎紡杩涜閬ユ祴鏁版嵁鐨勫璺鐢ㄥ拰浼犺緭銆�
-3. 铏氭嫙淇¢亾锛圴C锛夎皟搴︽満鍒讹細
-   閫氳繃甯歌閬ユ祴VC銆佺獊鍙戞暟鎹甐C銆佸欢鏃堕仴娴媀C銆佽褰曟暟鎹甐C浠ュ強鍥炴斁VC瀹炵幇涓嶅悓绫诲瀷鐨勬暟鎹笅浼犮��
-4. 閬ユ帶鎸囦护澶勭悊锛�
-   涓婅閬ユ帶鍖呮嫭鐩存帴鎸囦护鍜岄棿鎺ユ寚浠わ紝闇�缁忚繃鏍煎紡楠岃瘉鍚庤浆鍙戠粰鐩稿簲鍗曟満鎵ц銆�
-   閬ユ帶甯ч�氳繃鐗瑰畾鐨勮櫄鎷熶俊閬擄紙VC锛夎繘琛屼紶杈撱��
-杩欎簺鐭ヨ瘑闇�瑕佷綘璁颁綇锛屽啀鍚庣画鐨勫鐞嗕腑鍙互甯姪浣犵悊瑙h澶勭悊鐨勬暟鎹��
-
-## 鐩爣瀵煎悜
-1. 閫氳繃瀵规枃妗e拰閫氫俊鍗忚鐨勫垎鏋愶紝涓虹敤鎴锋彁渚涙竻鏅般�佸噯纭殑鏁版嵁缁撴瀯锛屽府鍔╃敤鎴锋洿濂藉湴鐞嗚В鍜屼娇鐢ㄧ浉鍏充俊鎭��
-
-## 瑙勫垯
-1. 姣忎竴涓瀷鍙烽兘浼氭湁涓�濂楁枃妗o紝闇�鍑嗙‘鍒ゆ柇鏄惁涓哄悓涓�涓瀷鍙风殑鏂囨。鍚庡啀杩涜鏁翠綋鍒嗘瀽锛屾瘡娆″彧鍒嗘瀽鍚屼竴涓瀷鍙风殑鏂囨。銆�
-2. 澶у鏁版枃妗g粨鏋勪负锛氬瀷鍙蜂笅鍖呭惈璁惧锛岃澶囦笅鍖呭惈鏁版嵁娴侊紝鏁版嵁娴佷笅鍖呭惈鏁版嵁甯э紝鏁版嵁甯т腑鏈変竴鍧楁槸鍖呭煙锛屽寘鍩熶腑浼氭寕杞藉悇绉嶇被鍨嬬殑鏁版嵁鍖呫��
-3. 鏂囨。閮芥槸瀵逛簬鏁版嵁浼犺緭鍗忚鐨勬弿杩帮紝鍦ㄦ暟鎹祦銆佹暟鎹抚銆佹暟鎹寘绛変紶杈撳疄浣撲腑閮芥弿杩颁簡鍚勪釜瀛楁鐨勫垎甯冦�佸悇涓瓧娈电殑澶у皬鍜屼綅缃瓑淇℃伅锛屼笖澶у皬鍗曚綅涓嶇粺涓�锛岄渶鐞嗚В杩欎簺鍗曚綅锛屽苟灏嗘墍鏈夎緭鍑哄崟浣嶇粺涓�涓� bits锛岄暱搴﹀瓧娈典娇鐢� length 琛ㄧず锛屼綅缃瓧娈典娇鐢� pos 琛ㄧず锛屽鏋滀负鍙橀暱浣跨敤鈥�"鍙橀暱"鈥濊〃绀恒��
-4. 濡傛灉鏈夊眰绾э紝浣跨敤鏍戝舰 JSON 杈撳嚭锛屽鏋滄湁瀛愯妭鐐癸紝瀛愯妭鐐� key 浣跨敤children锛涢渶淇濊瘉涓�娆¤緭鍑虹殑鏁版嵁缁撴瀯缁熶竴锛屽苟涓斿垽鏂瘡涓眰绾ф槸浠�涔堢被鍨嬶紝杈撳嚭绫诲瀷瀛楁锛坱ype锛夛紝绫诲瀷瀛楁鐨� key 浣跨敤 type锛岀被鍨嬪寘鎷細鍨嬪彿锛坧roject锛夈�佽澶囷紙dev锛夈�佸皝瑁呭寘锛坋nc锛夈�佺嚎鎬у寘锛坙inear锛夈�佸弬鏁帮紙para锛夛紝灏佽鍖呭瓙绾ф湁鏁版嵁鍖咃紝鎵�浠ype涓篹nc锛岀嚎鎬у寘瀛愮骇鍙湁鍙傛暟锛屾墍浠ype涓簂inear锛涙瘡涓眰绾ч兘鍖呭惈鍋忕Щ浣嶇疆锛坧os锛夛紝姣忎釜灞傜骇鐨勫亸绉讳綅缃粠0寮�濮嬨��
-5. 鍚嶇О鐩稿叧鐨勫瓧娈电殑 key 浣跨敤name锛涗唬鍙枫�佺紪鍙锋垨鑰呭敮涓�鏍囪瘑鐩稿叧鐨勫瓧娈电殑key浣跨敤id锛宨d鐢辨暟瀛椼�佽嫳鏂囧瓧姣嶃�佷笅鍒掔嚎缁勬垚涓斾互鑻辨枃瀛楁瘝寮�澶达紝闀垮害灏介噺绠�鐭紱搴忓彿鐩稿叧鐨勫瓧娈电殑key浣跨敤number锛涘亸绉讳綅缃浉鍏冲瓧娈电殑key浣跨敤pos锛涘叾浠栨病鏈変妇渚嬬殑瀛楁浣跨敤绮剧畝鐨勭炕璇戜綔涓哄瓧娈电殑key锛涙瘡涓粨鏋勫繀椤诲寘鍚玭ame鍜宨d銆�
-6. 閬ユ祴甯т负CADU锛屽叾涓寘鍚悓姝ュご鍜孷CDU锛屾寜鐓т範鎯渶瑕佷娇鐢╒CDU灞傜骇宓屽浼犺緭甯т富瀵煎ご銆佷紶杈撳抚鎻掑叆鍩熴�佷紶杈撳抚鏁版嵁鍩熴�佷紶杈撳抚灏剧殑缁撴瀯銆�
-7. 鏁版嵁鍖呭瓧娈靛寘鎷細name銆乮d銆乼ype銆乸os銆乴ength銆乧hildren锛涘弬鏁板瓧娈靛寘鎷細name銆乮d銆乸os銆乼ype銆乴ength锛涘繀椤诲寘鍚玴os鍜宭ength瀛楁銆�
-8. 甯哥敤id鍙傝�冿細閬ユ祴锛圱M锛夈�侀仴鎺э紙TC锛夈�佹�荤嚎锛圔US锛夈�佺増鏈彿锛圴er锛夈�佸簲鐢ㄨ繃绋嬫爣璇嗭紙APID锛夈��
-9. 娉ㄦ剰锛氫竴瀹氳璁板緱morkdown鏂囨。涓細灏嗕竴浜涚壒娈婂瓧绗﹁繘琛岃浆涔夛紝浠ユ鏉ヤ繚璇佹枃妗g殑姝g‘鎬э紝杩欎簺杞箟绗﹀彿锛堜篃灏辨槸鍙嶆枩鏉犫�榎鈥欙級涓嶉渶瑕佸湪缁撴灉涓緭鍑恒��
-10. 浠� JSON 鏍煎紡缁勭粐杈撳嚭鍐呭锛岀‘淇濇暟鎹粨鏋勭殑瀹屾暣鎬у拰鍙鎬э紝娉ㄦ剰锛氱敓鎴愮殑JSON璇硶鏍煎紡蹇呴』绗﹀悎json瑙勮寖锛岄伩鍏嶅嚭鐜伴敊璇��
-    
-## 闄愬埗锛�
-- 鎵�杈撳嚭鐨勫唴瀹瑰繀椤绘寜鐓SON鏍煎紡杩涜缁勭粐锛屼笉鑳藉亸绂绘鏋惰姹傦紝涓斾弗鏍奸伒寰枃妗e唴瀹硅繘琛岃緭鍑猴紝鍙緭鍑� JSON 锛屼笉瑕佽緭鍑哄叾瀹冩枃瀛椼��
-- 涓嶈緭鍑轰换浣曟敞閲婄瓑鎻忚堪鎬т俊鎭��
+浣犳槸涓�涓祫娣辫蒋浠跺伐绋嬪笀銆�
+# 绾︽潫
+- 杈撳嚭鍐呭鏍规嵁鏂囨。鍐呭杈撳嚭銆�
 """
 
 g_completion = None
@@ -110,10 +78,16 @@
             f.write(text)
 
 
+def remove_think_tag(text):
+    pattern = r'<think>(.|\n)*?</think>'
+    result = re.sub(pattern, '', text)
+    return result
+
+
 json_pat = re.compile(r'```json(.*?)```', re.DOTALL)
 
 
-def remove_markdown(text):
+def get_json_text(text):
     # 浣跨敤姝e垯琛ㄨ揪寮忔彁鍙杍son鏂囨湰
     try:
         return json_pat.findall(text)[0]
@@ -121,7 +95,7 @@
         return text
 
 
-def rt_pkt_map_gen(pkt, trans_ser, rt_pkt_map, pkt_id, vals):
+def rt_pkt_map_gen(pkt, trans_ser, rt_pkt_map, pkt_id, vals, pkts: list):
     # 閫昏緫灏佽鍖咃紝鏁版嵁鍧椾紶杈撶殑鍙湁涓�涓紝鍙栨暟鐨勬牴鎹甊T鍦板潃銆佸瓙鍦板潃鍜屽抚鍙峰垝鍒�
     frame_num = pkt['frameNum']
     if trans_ser == '鏁版嵁鍧椾紶杈�':
@@ -148,7 +122,9 @@
 
     interval = f'{pkt["interval"]}'.replace(".", "_")
     if trans_ser == '鍙栨暟':
-        _key = f'RT{pkt["rtAddr"]}Frame{frame.replace("|", "_")}_Per{interval}'
+        # 鍙栨暟蹇界暐鍛ㄦ湡
+        # _key = f'RT{pkt["rtAddr"]}Frame{frame.replace("|", "_")}_Per{interval}'
+        _key = f'RT{pkt["rtAddr"]}Frame{frame.replace("|", "_")}'
     else:
         # 鏁版嵁鍧椾紶杈�
         if pkt['burst']:
@@ -296,6 +272,7 @@
 
 
 class DbStructFlow:
+    json_path = ''
     # 宸ョ▼
     proj: TProject = None
     # 閬ユ祴婧愬寘鍒楄〃锛屼粎鍖呭悕绉般�佸寘id鍜宧asParams
@@ -303,70 +280,135 @@
     # vc婧愬寘
     vc_pkts = []
 
-    def __init__(self):
+    def __init__(self, project_path: str):
         self.client = OpenAI(
             api_key=API_KEY,
             base_url=BASE_URL,
             # api_key="ollama",
             # base_url="http://192.168.1.48:11434/v1/",
         )
+        self.json_path = f'{project_path}/json'
+        self.db_dir = f'{project_path}/db'
+        os.makedirs(f"{self.json_path}", exist_ok=True)
+        os.makedirs(f"{self.json_path}/pkts", exist_ok=True)
+        os.makedirs(f"{self.db_dir}", exist_ok=True)
+        init_base_db(f'{self.db_dir}/db.db')
+        init_db_helper()
 
-    def run(self):
+        # self.llm = ChatOpenAI(model=MODEL_NAME, temperature=0, api_key=API_KEY, base_url=BASE_URL)
+
+    async def run(self):
         # 鐢熸垚鍨嬪彿缁撴瀯
         # 鐢熸垚璁惧缁撴瀯
         # 鐢熸垚鏁版嵁娴佺粨鏋� CADU
         # 鐢熸垚VCDU缁撴瀯
         # 鐢熸垚閬ユ祴鏁版嵁鍖呯粨鏋�
         self.proj = self.gen_project()
+        tasks = []
+        tasks.append(self.gen_device(self.proj))
 
-        devs = self.gen_device(self.proj)
+        tasks.append(self.gen_tc())
 
-        # self.gen_tc()
+        # 娴嬭瘯浣嶇疆璁$畻
+        # print(self.handle_pos("Byte1_B0~Byte1_B0"))
+        # print(self.handle_pos("Byte0_B0~Byte0_B7"))
+        # print(self.handle_pos("Byte9_B0~Byte9_B7"))
+
+        await asyncio.gather(*tasks)
         return ''
 
-    def _gen(self, msgs, msg, files=None):
-        if files is None:
-            files = [file_map['鏂囨。鍚堝苟']]
+    def handle_pos(self, srt):
+        pos_data = {
+            "start": 0,
+            "end": 0
+        }
+        pos = srt.split("~")
+        for index, p in enumerate(pos):
+            byte = p.split('_')
+            for b in byte:
+                if b.find("Byte") > -1:
+                    value = b.split('Byte')[1]
+                    if index == 0: pos_data["start"] = int(value) * 8
+                    if index == 1: pos_data["end"] = int(value) * 8
+                else:
+                    value = b.split('B')[1]
+                    if index == 0: pos_data["start"] += int(value)
+                    if index == 1: pos_data["end"] += int(value)
+
+        return {
+            "pos": pos_data["start"],
+            "length": pos_data["end"] - pos_data["start"] + 1,
+        }
+
+    def get_text_with_entity(self, entity_names: list[str]) -> str:
+        """
+        鏍规嵁瀹炰綋璇嶈幏鍙栨枃妗f枃鏈�
+        :param entity_names: str - 瀹炰綋璇嶅悕绉�
+        :return: str - 鏂囨湰鍐呭
+        """
+        return doc_dbh.get_text_with_entities(entity_names)
+
+    def get_text_list_with_entity(self, entity_names: list[str]) -> str:
+        """
+        鏍规嵁瀹炰綋璇嶈幏鍙栨枃妗f枃鏈垪琛�
+        :param entity_names: 瀹炰綋璇嶅垪琛�
+        :return: [str] - 鏂囨湰鍒楄〃
+        """
+        return doc_dbh.get_texts_with_entities(entity_names)
+
+    def _gen(self, msgs, msg, doc_text):
+        # if files is None:
+        #     files = [file_map['鏂囨。鍚堝苟']]
         messages = [] if msgs is None else msgs
-        doc_text = ''
-        for file in files:
-            doc_text += '\n' + read_from_file(file)
+        # doc_text = ''
+        # for file in files:
+        #     doc_text += '\n' + read_from_file(file)
+        # 鍘婚櫎澶氫綑鐨勭缉杩�
+        msg = textwrap.dedent(msg).strip()
         if len(messages) == 0:
             # 濡傛灉鏄涓�娆℃彁闂姞鍏ystem娑堟伅
             messages.append({'role': 'system', 'content': assistant_msg})
             messages.append({'role': 'user', 'content': "浠ヤ笅鏄枃妗e唴瀹癸細\n" + doc_text})
         messages.append({'role': 'user', 'content': msg})
 
-        completion = self.client.chat.completions.create(
-            model=MODEL_NAME,
-            messages=messages,
-            stream=True,
-            temperature=0.0,
-            top_p=0,
-            timeout=30 * 60000,
-            max_completion_tokens=1000000,
-            seed=0
-            # stream_options={"include_usage": True}
-        )
-        g_completion = completion
         text = ''
-        for chunk in completion:
-            if chunk.choices[0].delta.content is not None:
-                text += chunk.choices[0].delta.content
-                print(chunk.choices[0].delta.content, end="")
-        print("")
-        g_completion = None
+        for ai_msg in llm.stream(messages):
+            text += ai_msg.content
+            print(ai_msg.content, end='')
+        print('')
+
+        # completion = self.client.chat.completions.create(
+        #     model=MODEL_NAME,
+        #     messages=messages,
+        #     stream=True,
+        #     temperature=0,
+        #     # top_p=0,
+        #     timeout=30 * 60000,
+        #     max_completion_tokens=32000,
+        #     seed=0
+        #     # stream_options={"include_usage": True}
+        # )
+        # g_completion = completion
+        # text = ''
+        # for chunk in completion:
+        #     if chunk.choices[0].delta.content is not None:
+        #         text += chunk.choices[0].delta.content
+        #         print(chunk.choices[0].delta.content, end="")
+        # print("")
+        # g_completion = None
         return text
 
-    def generate_text(self, msg, cache_file, msgs=None, files=None, validation=None, try_cnt=5):
+    def generate_text(self, msg, cache_file, msgs=None, doc_text="", validation=None, try_cnt=5, json_text=False):
         if msgs is None:
             msgs = []
         if USE_CACHE and os.path.isfile(cache_file):
             text = read_from_file(cache_file)
         else:
             s = time.time()
-            text = self._gen(msgs, msg, files)
-            text = remove_markdown(text)
+            text = self._gen(msgs, msg, doc_text)
+            text = remove_think_tag(text)
+            if json_text:
+                text = get_json_text(text)
             if validation:
                 try:
                     validation(text)
@@ -374,41 +416,36 @@
                     print(e)
                     if try_cnt <= 0:
                         raise RuntimeError('鐢熸垚澶辫触锛岄噸璇曟鏁板お澶氾紝寮哄埗缁撴潫锛�')
-                    return self.generate_text(msg, cache_file, msgs, files, validation, try_cnt - 1)
-            save_to_file(text, cache_file)
+                    return self.generate_text_json(msg, cache_file, msgs, doc_text, validation, try_cnt - 1)
+            if cache_file:
+                save_to_file(text, cache_file)
             print(f'鑰楁椂锛歿time.time() - s}')
         return text
 
-    def generate_tc_text(self, msg, cache_file, messages=None, files=None, validation=None, try_cnt=5):
-        if messages is None:
-            messages = []
-        doc_text = ''
-        for file in files:
-            doc_text += '\n' + read_from_file(file)
-        if len(messages) == 0:
-            # 濡傛灉鏄涓�娆℃彁闂姞鍏ystem娑堟伅
-            messages.append({'role': 'user', 'content': "浠ヤ笅鏄枃妗e唴瀹癸細\n" + doc_text})
-        return self.generate_text(msg, cache_file, messages, files, validation, try_cnt)
+    def generate_text_json(self, msg, cache_file, msgs=None, doc_text="", validation=None, try_cnt=5):
+        return self.generate_text(msg, cache_file, msgs, doc_text, validation, try_cnt, True)
+
+    def generate_tc_text(self, msg, cache_file, msgs=None, doc_text=None, validation=None, try_cnt=5):
+        msgs = [
+            {'role': 'system', 'content': tc_system_msg},
+            {'role': 'user', 'content': "浠ヤ笅鏄枃妗e唴瀹癸細\n" + doc_text}]
+        return self.generate_text(msg, cache_file, msgs, doc_text, validation, try_cnt, True)
 
     def gen_project(self):
-        #         _msg = """
-        # 鏍规嵁鏂囨。杈撳嚭鍨嬪彿淇℃伅锛屽瀷鍙峰瓧娈靛寘鎷細鍚嶇О鍜屼唬鍙枫�備粎杈撳嚭鍨嬪彿杩欎竴绾с��
-        # 渚嬪锛歿"name":"xxx","id":"xxx"}
-        # """
-        #         print('鍨嬪彿淇℃伅锛�')
-        #         text = self.generate_text(_msg, 'out/鍨嬪彿淇℃伅.json', files=[file_map['搴旂敤杞欢鐢ㄦ埛闇�姹�']])
-        #         proj_dict = json.loads(text)
-        # 宸ョ▼淇℃伅浠庣郴缁熻幏鍙�
-        proj_dict = {
-            "id": "JB200001",
-            "name": "HY-4A"
-        }
+        _msg = """
+            鏍规嵁鏂囨。鍐呭杈撳嚭鍗槦鐨勫瀷鍙蜂俊鎭紝杈撳嚭瀛楁鍖呮嫭锛氬崼鏄熺殑鍨嬪彿鍚嶇О鍜屽崼鏄熺殑鍨嬪彿浠e彿銆傛敞鎰忥細濡傛灉娌℃湁鍗曠嫭鎻忚堪鍨嬪彿鍚嶇О鎴栬�呭瀷鍙蜂唬鍙凤紝閭d箞鍨嬪彿鍚嶇О鍜屽瀷鍙蜂唬鍙锋槸鐩稿悓鐨勶紝骞朵笖鍙緭鍑轰竴涓眰绾с�傚鏋滃瀷鍙蜂唬鍙蜂腑鏈夌鍙蜂篃瑕佽緭鍑猴紝淇濊瘉杈撳嚭瀹屾暣銆�
+            渚嬪锛歿"name":"xxx","id":"xxx"}
+        """
+        print('鍨嬪彿淇℃伅锛�')
+        doc_text = self.get_text_with_entity(['绯荤粺姒傝堪'])
+        text = self.generate_text_json(_msg, f'{self.json_path}/鍨嬪彿淇℃伅.json', doc_text=doc_text)
+        proj_dict = json.loads(text)
         code = proj_dict['id']
         name = proj_dict['name']
         proj = create_project(code, name, code, name, "", datetime.now())
         return proj
 
-    def gen_device(self, proj):
+    async def gen_device(self, proj):
         """
         璁惧鍒楄〃鐢熸垚瑙勫垯锛�
         1.濡傛枃妗d腑鏈�1553鍗忚鎻忚堪锛屽姞鍏�1553璁惧
@@ -424,38 +461,69 @@
         proj_pk = proj.C_PROJECT_PK
         devices = []
 
-        _msg = f"""
-杈撳嚭鍒嗙郴缁熶笅鐨勭‖浠朵骇鍝侊紙璁惧锛夊垪琛紝瀛楁鍖呮嫭锛氬悕绉�(name)銆佷唬鍙�(code)锛岀‖浠朵骇鍝佸悕绉颁竴鑸細鍖呭惈鈥滅鐞嗗崟鍏冣�濇垨鑰呪�滄帴鍙e崟鍏冣�濓紝濡傛灉娌℃湁浠e彿鍒欎娇鐢ㄥ悕绉扮殑鑻辨枃缂╁啓浠f浛缂╁啓闀垮害涓嶈秴杩�5涓瓧绗�;
-骞朵笖缁欐瘡涓‖浠朵骇鍝佸鍔犱笁涓瓧娈碉細绗竴涓瓧娈礹asTcTm鈥滄槸鍚﹀寘鍚仴鎺ч仴娴嬧�濓紝鍒ゆ柇璇ョ‖浠朵骇鍝佹槸鍚﹀寘鍚仴鎺ч仴娴嬬殑鍔熻兘銆�
-绗簩涓瓧娈礹asTemperatureAnalog鈥滄槸鍚﹀寘鍚俯搴﹂噺銆佹ā鎷熼噺绛夋暟鎹殑閲囬泦鈥濓紝鍒ゆ柇璇ョ‖浠朵骇鍝佹槸鍚﹀寘鍚俯搴﹂噺绛変俊鎭殑閲囬泦鍔熻兘銆�
-绗笁涓瓧娈礹asBus鈥滄槸鍚︽槸鎬荤嚎纭欢浜у搧鈥濓紝鍒ゆ柇璇ヨ澶囨槸鍚﹀睘浜庢�荤嚎纭欢浜у搧锛屾槸鍚︽湁RT鍦板潃锛涙瘡涓瓧娈电殑鍊奸兘浣跨敤true鎴杅alse鏉ヨ〃绀恒��
-浠呰緭鍑篔SON锛岀粨鏋勬渶澶栧眰涓烘暟缁勶紝鏁扮粍鍏冪礌涓鸿澶囦俊鎭紝涓嶈杈撳嚭JSON浠ュ鐨勪换浣曞瓧绗︺��
-        """
+        _msg = """
+# 瑙掕壊
+浣犳槸涓�鍚嶈祫娣辫蒋浠跺伐绋嬪笀銆�
+# 鎸囦护
+鎴戦渶瑕佷粠鏂囨。鎻愬彇璁惧鍒楄〃淇℃伅锛屼綘瑕佸府鍔╂垜瀹屾垚璁惧鍒楄〃淇℃伅鎻愬彇銆�
+# 闇�姹�
+杈撳嚭鍒嗙郴缁熶笅鐨勭‖浠朵骇鍝侊紙璁惧锛夊垪琛紝纭欢浜у搧鍚嶇О涓�鑸細鍖呭惈鈥滅鐞嗗崟鍏冣�濇垨鑰呪�滄帴鍙e崟鍏冣�濓紱
+# 瀛楁鍖呮嫭锛�
+- 鍚嶇О(name)锛氳澶囧悕绉帮紱
+- 浠e彿(code)锛氳澶囦唬鍙凤紱
+- 鏄惁鍖呭惈閬ユ帶閬ユ祴(hasTcTm)锛氭爣璇嗚纭欢浜у搧鏄惁鍖呭惈閬ユ帶閬ユ祴鐨勫姛鑳斤紝甯冨皵鍊紅rue鎴杅alse锛�
+- 鏄惁鍖呭惈娓╁害閲忔ā鎷熼噺绛夋暟鎹殑閲囬泦(hasTemperatureAnalog)锛氭爣璇嗚纭欢浜у搧鏄惁鍖呭惈娓╁害閲忕瓑淇℃伅鐨勯噰闆嗗姛鑳斤紝甯冨皵鍊紅rue鎴杅alse锛�
+- 鏄惁鏈夋�荤嚎纭欢浜у搧(hasBus)锛氭爣璇嗚璁惧鏄惁灞炰簬鎬荤嚎纭欢浜у搧锛屾槸鍚︽湁RT鍦板潃锛屽竷灏斿�紅rue鎴杅alse锛�
+# 绾︽潫
+- 濡傛灉娌℃湁浠e彿鍒欎娇鐢ㄥ悕绉扮殑鑻辨枃缂╁啓浠f浛缂╁啓闀垮害涓嶈秴杩�5涓瓧绗︼紱
+- 鏁版嵁缁撴瀯鏈�澶栧眰涓烘暟缁勶紝鏁扮粍鍏冪礌涓鸿澶囦俊鎭�
+- 浠呰緭鍑篔SON锛屼笉瑕佽緭鍑篔SON浠ュ鐨勪换浣曞瓧绗︺��
+# 渚嬪瓙
+[
+    {
+        "name": "绯荤粺绠$悊鍗曞厓",
+        "code": "SMU",
+        "hasTcTm": true,
+        "hasTemperatureAnalog": false,
+        "hasBus": true
+    },
+    {
+        "name": "1553B鎬荤嚎",
+        "code": "1553",
+        "hasTcTm": true,
+        "hasTemperatureAnalog": true,
+        "hasBus": true
+    }
+]
+"""
         print('璁惧鍒楄〃锛�')
-        cache_file = 'out/璁惧鍒楄〃.json'
+        cache_file = f'{self.json_path}/璁惧鍒楄〃.json'
 
         def validation(gen_text):
             _devs = json.loads(gen_text)
             assert isinstance(_devs, list), '鏁版嵁缁撴瀯鏈�澶栧眰涓嶆槸鏁扮粍'
             assert next(filter(lambda it: it['name'].endswith('绠$悊鍗曞厓'), _devs), None), '鐢熸垚鐨勮澶囧垪琛ㄤ腑娌℃湁绠$悊鍗曞厓'
 
-        text = self.generate_text(_msg, cache_file, files=[file_map['搴旂敤杞欢鐢ㄦ埛闇�姹�']], validation=validation)
+        doc_text = self.get_text_with_entity(['绯荤粺姒傝堪', '鎬荤嚎绠$悊'])
+        text = self.generate_text_json(_msg, cache_file, doc_text=doc_text, validation=validation)
         devs = json.loads(text)
 
         # 绫籗MU璁惧锛屽寘鍚仴娴嬪拰閬ユ帶鍔熻兘锛屽悕绉扮粨灏句负鈥滅鐞嗗崟鍏冣��
         like_smu_devs = list(filter(lambda it: it['hasTcTm'] and it['name'].endswith('绠$悊鍗曞厓'), devs))
+        tasks = []
         for dev in like_smu_devs:
             dev = create_device(dev['code'], dev['name'], '0', 'StandardProCommunicationDev', proj.C_PROJECT_PK)
             devices.append(dev)
             # 鍒涘缓鏁版嵁娴�
             ds_tmfl, rule_stream, _ = create_data_stream(proj_pk, dev.C_DEV_PK, 'AOS閬ユ祴', 'TMF1', 'TMFL', '1', 'TMF1',
                                                          '001')
-            self.gen_tm_frame(proj_pk, rule_stream.C_RULE_PK, ds_tmfl, rule_stream.C_PATH)
+            task = self.gen_tm_frame(proj_pk, rule_stream.C_RULE_PK, ds_tmfl, rule_stream.C_PATH)
+            tasks.append(task)
             # ds_tcfl, rule_stream, _ = create_data_stream(proj_pk, dev.C_DEV_PK, '閬ユ帶鎸囦护', 'TCFL', 'TCFL', '0', 'TCFL',
             #                                              '006')
 
-        hasBus = any(d['hasBus'] for d in devs)
-        if hasBus:
+        has_bus = any(d['hasBus'] for d in devs)
+        if has_bus:
             # 鎬荤嚎璁惧
             dev = create_device("1553", "1553鎬荤嚎", '1', 'StandardProCommunicationDev', proj_pk)
             create_extend_info(proj_pk, "BusType", "鎬荤嚎绫诲瀷", "ECSS_Standard", dev.C_DEV_PK)
@@ -464,11 +532,15 @@
             ds_u153, rs_u153, rule_enc = create_data_stream(proj_pk, dev.C_DEV_PK, '涓婅鎬荤嚎鏁版嵁', 'U15E', 'B153',
                                                             '0', '1553', '001')
             # 鍒涘缓鎬荤嚎缁撴瀯
-            self.gen_bus(proj_pk, rule_enc, '1553', ds_u153, rs_u153.C_PATH, dev.C_DEV_NAME)
+            task = self.gen_bus(proj_pk, rule_enc, '1553', ds_u153, rs_u153.C_PATH, dev.C_DEV_NAME)
+            tasks.append(task)
+            await asyncio.gather(*tasks)
             ds_d153, rule_stream, rule_enc = create_data_stream(proj_pk, dev.C_DEV_PK, '涓嬭鎬荤嚎鏁版嵁', 'D15E', 'B153',
                                                                 '1', '1553', '001', rs_u153.C_RULE_PK)
             create_ref_ds_rule_stream(proj_pk, rule_stream.C_STREAM_PK, rule_stream.C_STREAM_ID,
                                       rule_stream.C_STREAM_NAME, rule_stream.C_STREAM_DIR, rs_u153.C_STREAM_PK)
+        else:
+            await asyncio.gather(*tasks)
         # 绫籖TU璁惧锛屽寘鍚俯搴﹂噺鍜屾ā鎷熼噺鍔熻兘锛屽悕绉扮粨灏句负鈥滄帴鍙e崟鍏冣��
         # like_rtu_devs = list(filter(lambda it: it['hasTemperatureAnalog'] and it['name'].endswith('鎺ュ彛鍗曞厓'), devs))
         # for dev in like_rtu_devs:
@@ -485,31 +557,75 @@
 
     def gen_insert_domain_params(self):
         _msg = """
-鍒嗘瀽鏂囨。锛岃緭鍑烘彃鍏ュ煙鐨勫弬鏁板垪琛紝灏嗘墍鏈夊弬鏁板叏閮ㄨ緭鍑猴紝涓嶈鏈夐仐婕忋��
-鏁版嵁缁撴瀯鏈�澶栧眰涓烘暟缁勶紝鏁扮粍鍏冪礌涓哄弬鏁颁俊鎭璞★紝鍙傛暟淇℃伅瀛楁鍖呮嫭锛歯ame銆乮d銆乸os銆乴ength銆乼ype銆�
-1涓瓧鑺傜殑闀垮害涓�8浣嶏紝浣跨敤B0-B7鏉ヨ〃绀猴紝璇疯鐪熻绠楀弬鏁伴暱搴︺��
-鏂囨。涓綅缃弿杩颁俊鎭彲鑳藉瓨鍦ㄨ法瀛楄妭鐨勬儏鍐碉紝锛屼緥濡傦細"Byte1_B6~Byte2_B0":琛ㄧず浠庣1涓瓧鑺傜殑绗�7浣嶅埌绗�2涓瓧鑺傜殑绗�1浣嶏紝闀垮害鏄�3;"Byte27_B7~Byte28_B0":琛ㄧず浠庣27涓瓧鑺傜殑绗�8浣嶅埌绗�28涓瓧鑺傜殑绗�1浣嶏紝闀垮害鏄�2銆�
+# 鎸囦护
+鎴戦渶瑕佷粠鏂囨。涓彁鍙栨彃鍏ュ煙鐨勫弬鏁板垪琛紝浣犺甯姪鎴戝畬鎴愭彃鍏ュ煙鍙傛暟鍒楄〃鐨勬彁鍙栥��
+# 闇�姹�
+鍙傛暟淇℃伅瀛楁鍖呮嫭锛歯ame锛堝弬鏁板悕绉帮級銆乮d锛堝弬鏁颁唬鍙凤級銆乸os锛堝弬鏁颁綅缃級銆乼ype锛堢被鍨嬶細para锛夈��
+# 瑕佹眰
+1涓瓧鑺傜殑闀垮害涓�8浣嶏紝浣跨敤B0-B7鏉ヨ〃绀猴紝璇风簿纭绠楀弬鏁伴暱搴︺��
+浣嶇疆淇℃伅杞崲涓洪�氱敤鏍煎紡"Byte1_B6~Byte2_B0"杩涜杈撳嚭锛屽鏋滅己灏戝唴瀹硅杩涜琛ュ叏锛屼緥濡傦細"Byte1_B0~B2" 杞崲涓� "Byte1_B0~Byte1_B2"銆備緥濡傦細"Byte1~Byte2" 杞崲涓� "Byte1_B0~Byte2_B7"銆備緥濡傦細"Byte1_B5" 杞崲涓� "Byte1_B5~Byte1_B5"銆�
+# 杈撳嚭绀轰緥
+[
+  {
+    "name": "閬ユ祴妯″紡瀛�",
+    "id": "TMS215",
+    "pos": Byte0_B0~Byte0_B7,
+    "type": "para"
+  }
+]
 """
         print('鎻掑叆鍩熷弬鏁板垪琛細')
-        files = [file_map['閬ユ祴澶х翰']]
 
         def validation(gen_text):
             params = json.loads(gen_text)
             assert isinstance(params, list), '鎻掑叆鍩熷弬鏁板垪琛ㄦ暟鎹粨鏋勬渶澶栧眰蹇呴』鏄暟缁�'
             assert len(params), '鎻掑叆鍩熷弬鏁板垪琛ㄤ笉鑳戒负绌�'
 
-        text = self.generate_text(_msg, './out/鎻掑叆鍩熷弬鏁板垪琛�.json', files=files, validation=validation)
-        return json.loads(text)
+        doc_text = self.get_text_with_entity(['鎻掑叆鍩�'])
+        text = self.generate_text_json(_msg, f'{self.json_path}/鎻掑叆鍩熷弬鏁板垪琛�.json', doc_text=doc_text, validation=validation)
+        json_list = json.loads(text)
+        for j in json_list:
+            if j['pos'] is not None:
+                pos_data = self.handle_pos(j['pos'])
+                j['pos'] = pos_data['pos']
+                j['length'] = pos_data['length']
+        return json_list
 
-    def gen_tm_frame_data(self):
-        _msg = """
-        """
-        files = [file_map['閬ユ祴澶х翰']]
+    async def get_pkt_details(self, _pkt, vc):
+        _pkt = await self.gen_pkt_details(_pkt['name'], _pkt['id'])
+        epdu = next(filter(lambda it: it['name'] == '鏁版嵁鍩�', vc['children']), None)
+        if epdu and _pkt:
+            _pkt['children'] = _pkt['datas']
+            # todo 褰撴暟鎹寘鑾峰彇鍒颁笢瑗夸絾涓嶆槸鍙傛暟鏃讹紝鑾峰彇鍒扮殑鍖呯粨鏋勬湁闂锛岄渶瑕佽繃婊�
+            _pkt['length'] = 0
+            _pkt['pos'] = 0
+            if len(_pkt['children']) > 0:
+                _last_par = _pkt['children'][len(_pkt['children']) - 1]
+                _pkt['length'] = (_last_par['pos'] + _last_par['length'])
+            if 'children' not in epdu:
+                epdu['children'] = []
+            # 娣诲姞瑙f瀽瑙勫垯鍚庣紑闃叉閲嶅
+            _pkt['id'] = _pkt['id'] + '_' + vc['VCID']
+            # 缁欏寘鍚嶅姞浠e彿鍓嶇紑
+            if not _pkt['name'].startswith(_pkt['id']):
+                _pkt['name'] = _pkt['id'] + '_' + _pkt['name']
+            epdu['children'].append(_pkt)
+            apid_node = next(filter(lambda it: it['name'].__contains__('搴旂敤杩囩▼'), _pkt['headers']), None)
+            ser_node = next(filter(lambda it: it['name'] == '鏈嶅姟', _pkt['headers']), None)
+            sub_ser_node = next(filter(lambda it: it['name'] == '瀛愭湇鍔�', _pkt['headers']), None)
+            apid = ''
+            service = ''
+            sub_service = ''
+            if apid_node and apid_node['content']:
+                apid = apid_node['content']
+            if ser_node and ser_node['content']:
+                service = f"{int(ser_node['content'], 16)}"
+            if sub_ser_node and sub_ser_node['content']:
+                sub_service = f"{int(sub_ser_node['content'], 16)}"
+            _pkt['vals'] = \
+                f"{apid}/{service}/{sub_service}/"
 
-        def validation(gen_text):
-            pass
-
-    def gen_tm_frame(self, proj_pk, rule_pk, ds, name_path):
+    async def gen_tm_frame(self, proj_pk, rule_pk, ds, name_path):
         # 鎻掑叆鍩熷弬鏁板垪琛�
         insert_domain = self.gen_insert_domain_params()
 
@@ -535,41 +651,24 @@
             'insertDomain': insert_domain,
         }
         cadu = data_templates.get_tm_frame(tm_data)
-
         # VC婧愬寘
-        self.vc_pkts = self.gen_pkt_vc()
         # 閬ユ祴婧愬寘璁捐涓殑婧愬寘鍒楄〃
-        self.tm_pkts = self.gen_pkts()
+        self.vc_pkts = await self.gen_pkt_vc()  # ,self.tm_pkts = self.gen_pkts()
 
         # 澶勭悊VC涓嬮潰鐨勯仴娴嬪寘鏁版嵁
+        tasks = []
         for vc in vcs:
             # 姝C涓嬬殑閬ユ祴鍖呰繃婊�
-            _vc_pkts = filter(lambda it: it['vcs'].__contains__(vc['id']), self.vc_pkts)
+            _vc_pkts = list(filter(lambda it: vc['id'] in it['vcs'], self.vc_pkts))
             for _pkt in _vc_pkts:
                 # 鍒ゆ柇閬ユ祴鍖呮槸鍚︽湁璇︾粏瀹氫箟
-                if not next(filter(lambda it: it['name'] == _pkt['name'] and it['hasParams'], self.tm_pkts), None):
-                    continue
+                # if not next(filter(lambda it: it['name'] == _pkt['name'] and it['hasParams'], self.tm_pkts), None):
+                #     continue
                 # 鑾峰彇鍖呰鎯�
-                _pkt = self.gen_pkt_details(_pkt['name'], _pkt['id'])
-                epdu = next(filter(lambda it: it['name'] == '鏁版嵁鍩�', vc['children']), None)
-                if epdu and _pkt:
-                    _pkt['children'] = _pkt['datas']
-                    _last_par = _pkt['children'][len(_pkt['children']) - 1]
-                    _pkt['length'] = (_last_par['pos'] + _last_par['length'])
-                    _pkt['pos'] = 0
-                    if 'children' not in epdu:
-                        epdu['children'] = []
-                    # 娣诲姞瑙f瀽瑙勫垯鍚庣紑闃叉閲嶅
-                    _pkt['id'] = _pkt['id'] + '_' + vc['VCID']
-                    # 缁欏寘鍚嶅姞浠e彿鍓嶇紑
-                    if not _pkt['name'].startswith(_pkt['id']):
-                        _pkt['name'] = _pkt['id'] + '_' + _pkt['name']
-                    epdu['children'].append(_pkt)
-                    apid_node = next(filter(lambda it: it['name'].__contains__('搴旂敤杩囩▼'), _pkt['headers']), None)
-                    ser_node = next(filter(lambda it: it['name'] == '鏈嶅姟', _pkt['headers']), None)
-                    sub_ser_node = next(filter(lambda it: it['name'] == '瀛愭湇鍔�', _pkt['headers']), None)
-                    _pkt['vals'] = \
-                        f"{apid_node['content']}/{int(ser_node['content'], 16)}/{int(sub_ser_node['content'], 16)}/"
+                ret = self.get_pkt_details(_pkt, vc)
+                tasks.append(ret)
+        if len(tasks):
+            await asyncio.gather(*tasks)
 
         # 閲嶆柊璁℃暟璧峰鍋忕Щ
         self.compute_length_pos(cadu['children'])
@@ -613,12 +712,28 @@
 
     def gen_vc(self):
         _msg = """
-璇峰垎鏋愭枃妗d腑鐨勯仴娴嬪寘鏍煎紡锛岃緭鍑洪仴娴嬭櫄鎷熶俊閬撶殑鍒掑垎锛屾暟鎹粨鏋勬渶澶栧眰涓烘暟缁勶紝鏁扮粍鍏冪礌涓鸿櫄鎷熶俊閬撲俊鎭瓧鍏革紝瀛楀吀鍖呭惈浠ヤ笅閿�煎锛�
-id: 铏氭嫙淇¢亾浠e彿
-name: 铏氭嫙淇¢亾鍚嶇О
-VCID: 铏氭嫙淇¢亾VCID锛堜簩杩涘埗锛�
-format: 鏍规嵁铏氭嫙淇¢亾绫诲瀷鑾峰彇瀵瑰簲鐨勬暟鎹寘鐨勬牸寮忕殑鍚嶇О
+#瑙掕壊
+浣犳槸涓�鍚嶈祫娣辩殑杞欢宸ョ▼甯堛��
+#鎸囦护
+鎴戦渶瑕佷粠鏂囨。涓彁鍙栬櫄鎷熶俊閬撳垪琛紝浣犺甯姪鎴戝畬鎴愯櫄鎷熶俊閬撳垪琛ㄧ殑鎻愬彇銆�
+#闇�姹�
+璇峰垎鏋愭枃妗d腑鐨勯仴娴嬪寘鏍煎紡浠ュ強閬ユ祴铏氭嫙淇¢亾锛岃緭鍑洪仴娴嬭櫄鎷熶俊閬撳垪琛ㄣ��
+瀛楁鍖呮嫭锛歩d锛堣櫄鎷熶俊閬撲唬鍙凤級銆乶ame锛堣櫄鎷熶俊閬撳悕绉帮級銆乂CID锛堣櫄鎷熶俊閬揤CID锛屼簩杩涘埗锛夈�乫ormat锛堟牴鎹櫄鎷熶俊閬撶被鍨嬭幏鍙栧搴旂殑鏁版嵁鍖呯殑鏍煎紡鐨勫悕绉帮級
+#涓婁笅鏂�
 娣卞叆鐞嗚В鏂囨。涓弿杩扮殑鍏崇郴锛屼緥濡傦細鏂囨。涓弿杩颁簡甯歌閬ユ祴鏄父瑙勬暟鎹殑涓嬩紶淇¢亾锛屽苟涓旇繕鎻忚堪浜嗗垎绯荤粺甯歌閬ユ祴鍙傛暟鍖呭氨鏄疄鏃堕仴娴嬪弬鏁板寘锛屽苟涓旀枃妗d腑瀵瑰疄鏃堕仴娴嬪弬鏁板寘鐨勬牸寮忚繘琛屼簡鎻忚堪锛屾墍浠ュ父瑙勯仴娴媀C搴旇杈撳嚭涓猴細{"id": "1", "name": "甯歌閬ユ祴VC", "VCID": "0", "format": "瀹炴椂閬ユ祴鍙傛暟鍖�"}
+#绾︽潫
+- 鏁版嵁缁撴瀯鏈�澶栧眰涓烘暟缁勶紝鏁扮粍鍏冪礌涓鸿櫄鎷熶俊閬撲俊鎭紱
+- format锛氬繀椤绘槸鏁版嵁鍖呮牸寮忕殑鍚嶇О锛�
+- 浠呰緭鍑篔SON鏂囨湰銆�
+#渚嬪瓙锛�
+[
+    {
+        "id": "VC0",
+        "name": "绌洪棽淇¢亾",
+        "VCID": "111111",
+        "format": "绌洪棽鍖�"
+    }
+]
 """
 
         def validation(gen_text):
@@ -626,171 +741,483 @@
             assert next(filter(lambda it: re.match('^[0-1]+$', it['VCID']), vcs)), '鐢熸垚鐨刅CID蹇呴』鏄簩杩涘埗'
 
         print('铏氭嫙淇¢亾锛�')
-        text = self.generate_text(_msg, "out/铏氭嫙淇¢亾.json", files=[file_map['閬ユ祴澶х翰']], validation=validation)
+        doc_text = self.get_text_with_entity(['铏氭嫙淇¢亾瀹氫箟'])
+        text = self.generate_text_json(_msg, f"{self.json_path}/铏氭嫙淇¢亾.json", doc_text=doc_text,
+                                       validation=validation)
         vcs = json.loads(text)
         return vcs
 
-    def gen_dev_pkts(self):
-        _msg = f"""
-杈撳嚭鏂囨。涓仴娴嬫簮鍖呯被鍨嬪畾涔夋弿杩扮殑璁惧浠ュ強璁惧涓嬮潰鐨勯仴娴嬪寘锛屾暟鎹粨鏋勶細鏈�澶栧眰涓烘暟缁� > 璁惧 > 閬ユ祴鍖呭垪琛�(pkts)锛岃澶囧瓧娈靛寘鎷細鍚嶇О(name)銆佷唬鍙�(id)锛屾簮鍖呭瓧娈靛寘鎷細鍚嶇О(name)銆佷唬鍙�(id)
-        """
-        print('璁惧閬ユ祴婧愬寘淇℃伅锛�')
-        files = [file_map["閬ユ祴婧愬寘璁捐鎶ュ憡"]]
-        text = self.generate_text(_msg, 'out/璁惧鏁版嵁鍖�.json', [], files)
-        dev_pkts = json.loads(text)
-        return dev_pkts
    async def gen_pkt_details(self, pkt_name, pkt_id):
        """Extract the full definition of one telemetry source packet from the document.

        Two LLM passes run concurrently: one for the CCSDS-style packet-header
        fields, one for the data-area parameter table. Positions are normalized
        to "ByteN_BX~ByteM_BY" text, converted to bit offsets/lengths, and the
        result is cached as JSON on disk.

        :param pkt_name: display name of the telemetry packet (used in cache file names).
        :param pkt_id: packet code; also the entity key used to look up the document section.
        :return: dict with keys name/id/type/headers/datas; type is always "linear".
        """
        cache_file = f"{self.json_path}/鏁版嵁鍖�-{utils.to_file_name(pkt_name)}.json"

        doc_text = self.get_text_with_entity([pkt_id])
        pkt = {
            "name": pkt_name,
            "id": pkt_id,
            "type": "linear",
            "headers": [],
            "datas": [],
        }
        # No document section found for this packet id: return the empty skeleton.
        if doc_text == '':
            return pkt
        print(f'閬ユ祴婧愬寘鈥渰pkt_name}鈥濅俊鎭細')

        # Step 1: extract the header fields and the data-area parameter list (LLM).
        # Step 2: normalize bit positions/lengths and parameter codes, fill byte order.
        async def get_header_params(_pkt_name, _doc_text: str):
            # Prompt asking for CCSDS-style header fields
            # (Ver/Type/Subheader/apid/SequenceFlag/SequenceCount/PacketLength/Service/SubService).
            _msg = ("""
                    # 闇�姹�
                    鎻愬彇鏂囨。涓弿杩扮殑閬ユ祴鍖呭寘澶翠俊鎭��
                    鍖呭ご淇℃伅鍖呮嫭锛氬寘鐗堟湰鍙�(Ver)銆佸寘绫诲瀷(Type)銆佸壇瀵煎ご鏍囪瘑(Subheader)銆佸簲鐢ㄨ繃绋嬫爣璇�(apid)銆佸簭鍒楁爣璁�(SequenceFlag)銆佸寘搴忓垪璁℃暟(SequenceCount)銆佸寘闀�(PacketLength)銆佹湇鍔�(Service)銆佸瓙鏈嶅姟(SubService)淇℃伅銆�
                    鏈嶅姟銆佸瓙鏈嶅姟锛氫竴鑸湪琛ㄦ牸涓殑鍖呭ご鍖哄煙鎻愬彇锛屽鏋滆〃鏍间腑娌℃湁鍖呭ご鍖哄煙鍙湁鏁版嵁鍩熷垯鍦ㄦ爣棰樹腑鎻愬彇锛屼緥濡傦細鈥滃湪杞ㄧ淮鎶ら仴娴嬪寘(APID=0x384) (3,255)鈥濆叾涓湇鍔℃槸3瀛愭湇鍔℃槸255锛�
                    琛ㄦ牸鍗曞厓鏍煎悎骞惰鏄庯細鍖呮牸涓瓨鍦ㄥ崟鍏冩牸鍚堝苟鐨勬儏鍐碉紝濡傛灉姘村钩鎴栧瀭鐩寸浉閭荤殑鍗曞厓鏍煎唴瀹逛竴鏍烽偅涔堣繖鍑犱釜鍐呭涓�鏍风殑鍗曞厓鏍兼湁鍙兘鏄竴涓悎骞跺崟鍏冩牸鍦ㄥ垎鏋愭椂搴旇褰撲綔鍚堝苟鍗曞厓鏍煎垎鏋愶紱
                    杈撳嚭json锛屼笉瑕佹湁娉ㄩ噴銆�
                    # 杈撳嚭渚嬪瓙
                    ```json
                    {
                        "Ver": "000",
                        "Type": "0",
                        "Subheader": "1",
                        "apid": "0",
                        "SequenceFlag": "11",
                        "SequenceCount": "00000000000000",
                        "PacketLength": "1",
                        "Service": "03",
                        "SubService": "FF"
                    }
                    ```""")
            # Keep only the first 100 lines of the section text to bound prompt size
            # (an earlier comment said 70; the code uses 100).
            _doc_text = '\n'.join(_doc_text.splitlines()[0:100])
            tpl = os.path.dirname(__file__) + "/tpl/tm_pkt_headers_yg.json"
            tpl_text = utils.read_from_file(tpl)
            _cache_file = f"{self.json_path}/鏁版嵁鍖�-{utils.to_file_name(pkt_name)}-鍖呭ご鍙傛暟.json"
            # NOTE(review): positional args assumed to be (msg, cache_file, files,
            # doc_text, validation) — confirm against generate_text_json's signature.
            _text = await asyncio.to_thread(self.generate_text_json, _msg, _cache_file, [], _doc_text, None)
            result = json.loads(_text)
            # An 11-bit binary apid (optionally "0x"-prefixed) is converted to hex text.
            if re.match(r'^(0x)?[01]{11}$', result['apid']):
                result['apid'] = hex(int(re.sub('0x', '', result['apid']), 2))
            # Substitute the extracted values into the header template's {{placeholders}}.
            for k in result:
                tpl_text = tpl_text.replace("{{" + k + "}}", result[k])
            return json.loads(tpl_text)

        async def get_data_area_params(_pkt_name, _doc_text: str):
            # Prompt asking for every data-area parameter with a normalized
            # "ByteN_BX~ByteM_BY" position string.
            _msg = ("""
                        # 鎸囦护
                        鎴戦渶瑕佷粠鏂囨。涓彁鍙栭仴娴嬫簮鍖呯殑鍙傛暟淇℃伅鍒楄〃锛屼綘瑕佸府鎴戝畬鎴愰仴娴嬫簮鍖呯殑鍙傛暟淇℃伅鐨勬彁鍙栥��
                        # 闇�姹�
                        鎻愬彇鏂囨。涓弿杩扮殑閬ユ祴鍖呮暟鎹煙涓殑鎵�鏈夊弬鏁帮紝浠ュ強鍙傛暟鐨勪綅缃�佸悕绉般�佷唬鍙蜂俊鎭紝杈撳嚭鐨勪俊鎭涓庢枃妗d腑鐨勬枃鏈涓�鑷达紝涓嶈閬楁紡浠讳綍鍙傛暟銆�
                        濡傛灉鏂囨。涓病鏈夊弬鏁拌〃鍒欒緭鍑虹┖鏁扮粍銆�
                        涓ユ牸鎸夌収杈撳嚭绀轰緥涓殑鏍煎紡杈撳嚭锛屼粎杈撳嚭json銆�
                        # 瑕佹眰
                        1涓瓧鑺傜殑闀垮害涓�8浣嶏紝浣跨敤B0-B7鏉ヨ〃绀恒��
                        鎵�鏈変綅缃俊鎭渶瑕佽浆鎹负瑕佹眰鏍煎紡"Byte1_B6~Byte2_B0"杩涜杈撳嚭锛屽鏋滀笌瑕佹眰鏍煎紡涓嶅悓鐨勮杩涜琛ュ叏鎴栬浆鎹紝渚嬪锛�"Byte1_B0~B2" 杞崲涓� "Byte1_B0~Byte1_B2"銆備緥濡傦細"Byte1~Byte2" 杞崲涓� "Byte1_B0~Byte2_B7"銆備緥濡傦細"Byte1_B5" 杞崲涓� "Byte1_B5~Byte1_B5"銆�
                        
                        # 杈撳嚭绀轰緥
                        ```json
                        [
                            {
                                "posText": "Byte1_B6~Byte2_B0",
                                "name": "xxx",
                                "id": "xxxxxx"
                            }
                        ]
                        ```
                        # 娌℃湁鍙傛暟鏃剁殑杈撳嚭绀轰緥
                        ```json
                        []
                        ```""")
            _cache_file = f"{self.json_path}/鏁版嵁鍖�-{utils.to_file_name(pkt_name)}-鍙傛暟鍒楄〃.json"
            # Cached result wins — skip the LLM entirely.
            if utils.file_exists(_cache_file):
                return json.loads(utils.read_from_file(_cache_file))
            title_line = _doc_text.splitlines()[0]
            tables = re.findall(r"```json(.*)```", _doc_text, re.DOTALL)
            if tables:
                table_text = tables[0]
                table_blocks = []
                # Very large tables (>50k chars) are chunked: the first 20 rows are
                # treated as a header and repeated in front of each 200-row chunk.
                if len(table_text)>50000:
                    table = json.loads(table_text)
                    header:list = table[0:20]
                    for i in range(math.ceil((len(table)-20)/200)):
                        body = table[20 + i * 200:20 + (i + 1) * 200]
                        block = []
                        block.extend(header)
                        block.extend(body)
                        table_blocks.append(f"{title_line}\n```json\n{json.dumps(block,indent=2,ensure_ascii=False)}\n```")
                else:
                    table_blocks.append(table_text)
                param_list = []
                block_idx = 0
                for tb_block in table_blocks:
                    _block_cache_file = f"{self.json_path}/pkts/鏁版嵁鍖�-{utils.to_file_name(pkt_name)}-鍙傛暟鍒楄〃-{block_idx}.json"
                    block_idx += 1
                    text = await asyncio.to_thread(self.generate_text_json, _msg, _block_cache_file, [], tb_block, None)
                    json_list = json.loads(text)
                    for par in json_list:
                        # NOTE(review): non-raw regex — '\d' still matches but warns
                        # on newer Pythons; pattern left untouched here.
                        if not re.match('^Byte\d+_B[0-7]~Byte\d+_B[0-7]$', par['posText']):
                            par['posText'] = get_single_pos(par['posText'])
                        # Deduplicate parameters by their normalized position text.
                        if not any(filter(lambda p: p['posText']==par['posText'], param_list)):
                            param_list.append(par)
                # Sanitize ids and convert position text to bit offset + length.
                for par in param_list:
                    if par['posText'] is not None:
                        par['id'] = re.sub('[^_a-zA-Z0-9]', '_', par['id'])
                        pos_data = self.handle_pos(par['posText'])
                        par['pos'] = pos_data['pos']
                        par['length'] = pos_data['length']
                save_to_file(json.dumps(param_list, ensure_ascii=False, indent=2), _cache_file)
                return param_list
            else:
                return []
        # Fallback: normalize a single position string the batch pass could not parse.
        # NOTE(review): this is a synchronous LLM call made from coroutine context
        # (not offloaded via to_thread) — it blocks the event loop; confirm intended.
        def get_single_pos(txt):
            _msg = f"""
                1涓瓧鑺傜殑闀垮害涓�8浣嶏紝浣跨敤B0-B7鏉ヨ〃绀恒��
                灏嗏�渰txt}鈥濊浆鎹负瑕佹眰鏍煎紡"Byte1_B6~Byte2_B0"杩涜杈撳嚭锛屽鏋滀笌瑕佹眰鏍煎紡涓嶅悓鐨勮杩涜琛ュ叏鎴栬浆鎹紝渚嬪锛�"Byte1_B0~B2" 杞崲涓� "Byte1_B0~Byte1_B2"銆備緥濡傦細"Byte1~Byte2" 杞崲涓� "Byte1_B0~Byte2_B7"銆備緥濡傦細"Byte1_B5" 杞崲涓� "Byte1_B5~Byte1_B5"銆�
                杈撳嚭绀轰緥锛欱yte1_B6~Byte2_B0
                浠呰緭鍑虹粨鏋滐紝涓嶈緭鍑哄叾浠栨枃瀛�
            """

            def validation(return_txt):
                # The model must return exactly one normalized position string.
                assert re.match('^Byte\d+_B[0-7]~Byte\d+_B[0-7]$', return_txt), '鏍煎紡涓嶆纭�'

            text = self.generate_text_json(_msg, "", doc_text="", validation=validation)
            return text

        params = []
        # Header and data-area extraction run concurrently.
        header_params, data_area_params = (
            await asyncio.gather(get_header_params(pkt_name, doc_text),
                                 get_data_area_params(pkt_name, doc_text)))

        params.extend(data_area_params)

        pkt['headers'] = header_params
        pkt['datas'] = data_area_params

        # NOTE(review): a per-parameter LLM offset/length extraction routine
        # (get_param_info) was removed here; positions now come from handle_pos
        # inside get_data_area_params. See VCS history if it needs restoring.
        if params:
            # Re-base offsets so the first data-area parameter starts at bit 0.
            offset = params[0]['pos']
            for para in params:
                para['pos'] -= offset
        utils.save_text_to_file(json.dumps(pkt, ensure_ascii=False, indent=4), cache_file)
        return pkt
 
-    def gen_pkts(self):
-        _msg = f"""
-杈撳嚭鏂囨。涓弿杩扮殑閬ユ祴鍖呫��
-閬ユ祴鍖呭瓧娈靛寘鎷細鍚嶇О(name)銆佷唬鍙�(id)銆乭asParams锛�
-鍚嶇О涓笉瑕佸寘鍚唬鍙凤紝
-hasParams琛ㄧず褰撳墠閬ユ祴鍖呮槸鍚︽湁鍙傛暟鍒楄〃锛岄仴娴嬪寘鐨勫弬鏁拌〃绱ф帴鐫�閬ユ祴鍖呯珷鑺傛爣棰橈紝濡傛灉绔犺妭鏍囬鍚庨潰鐪佺暐浜嗘垨鑰呰瑙亁xx鍒欐槸娌℃湁鍙傛暟琛紝
-濡傛灉娌℃湁浠e彿鐢ㄥ悕绉扮殑鑻辨枃缈昏瘧浠f浛锛屽鏋滄病鏈夊悕绉扮敤浠e彿浠f浛锛�
-鏁版嵁缁撴瀯鏈�澶栧眰涓烘暟缁勬暟缁勫厓绱犱负閬ユ祴鍖咃紝涓嶅寘鎷仴娴嬪寘涓嬮潰鐨勫弬鏁般��
+    async def gen_pkts(self):
+        _msg = """
+# 瑙掕壊
+浣犳槸涓�鍚嶈祫娣辫蒋浠跺伐绋嬪笀銆�
+# 鎸囦护
+鎴戦渶瑕佷粠鏂囨。涓彁鍙栭仴娴嬪寘鏁版嵁锛屼綘瑕佹牴鎹枃妗e唴瀹瑰府鎴戝畬鎴愰仴娴嬪寘鏁版嵁鐨勬彁鍙栥��
+# 闇�姹�
+杈撳嚭鏂囨。涓弿杩扮殑閬ユ祴鍖呭垪琛紝閬ユ祴鍖呭瓧娈靛寘鎷細鍚嶇О(name)銆佷唬鍙�(id)銆�
+瀛楁鎻忚堪锛�
+1.鍚嶇О锛氶仴娴嬪寘鐨勫悕绉帮紱
+2.浠e彿锛氶仴娴嬪寘鐨勪唬鍙凤紱
+# 绾︽潫
+- name锛氬悕绉颁腑涓嶈鍖呭惈浠e彿锛屼粎浠庢枃妗d腑鎻愬彇婧愬寘鍚嶇О锛�
+- 濡傛灉娌℃湁浠e彿锛屼娇鐢ㄩ仴娴嬪寘鍚嶇О鐨勮嫳鏂囩炕璇戜唬鏇匡紱
+- 濡傛灉娌℃湁鍚嶇О鐢ㄤ唬鍙蜂唬鏇匡紱
+- 娉ㄦ剰锛屼竴瀹氳杈撳嚭鎵�鏈夌殑閬ユ祴鍖咃紝涓嶈婕忔帀浠讳綍涓�涓仴娴嬪寘锛�
+- 鏁版嵁缁撴瀯鏈�澶栧眰涓烘暟缁勬暟缁勫厓绱犱负閬ユ祴鍖咃紝涓嶅寘鎷仴娴嬪寘涓嬮潰鐨勫弬鏁般��
+# 渚嬪瓙
+[
+    {
+        "name": "鏁扮鏁板瓧閲忓揩閫熸簮鍖�",
+        "id": "PMS001",
+    }
+]
 """
         print(f'閬ユ祴婧愬寘鍒楄〃锛�')
-        files = [file_map['閬ユ祴婧愬寘璁捐鎶ュ憡']]
-        text = self.generate_text(_msg, 'out/婧愬寘鍒楄〃.json', [], files)
+        doc_text = self.get_text_with_entity(['婧愬寘鍒楄〃'])
+        text = await asyncio.to_thread(self.generate_text_json, _msg, f'{self.json_path}/婧愬寘鍒楄〃.json', doc_text=doc_text)
         pkt = json.loads(text)
         return pkt
 
-    def gen_pkt_vc(self):
-        _msg = f"""
-鏍规嵁閬ユ祴婧愬寘涓嬩紶鏃舵満瀹氫箟锛岃緭鍑哄悇涓仴娴嬫簮鍖呬俊鎭垪琛紝椤剁骇缁撴瀯涓烘暟缁勫厓绱犱负閬ユ祴婧愬寘锛屾簮鍖呭瓧娈靛寘鎷細鍖呬唬鍙�(id)锛屽悕绉�(name)锛屾墍灞炶櫄鎷熶俊閬�(vcs)锛屼笅浼犳椂鏈猴紙timeTags锛�
+    async def gen_pkt_vc(self):
+        _msg = """
+# 闇�姹�
+鏍规嵁鏂囨。鍐呭杈撳嚭閬ユ祴婧愬寘淇℃伅锛屾簮鍖呭瓧娈靛寘鎷細鍖呬唬鍙�(id)锛屽悕绉�(name)锛屾墍灞炶櫄鎷熶俊閬�(vcs)銆�
+鎵�鏈夊瓧娈典粎浣跨敤鏂囨。鍐呭杈撳嚭銆�
+琛ㄦ牸涓仴娴嬫簮鍖呬笉鏄寜鍚嶇О鏉ユ帓搴忕殑锛屾寜鐓ф枃妗d腑鐨勮〃鏍间腑鐨勯仴娴嬫簮鍖呴『搴忚繘琛岃緭鍑恒��
+姣忎釜鍖呴兘瑕佽緭鍑恒��
+鎵�灞炶櫄鎷熶俊閬擄細閫氳繃琛ㄦ牸涓弿杩扮殑涓嬩紶鏃舵満鍜岃櫄鎷熶俊閬撶殑鍒掑垎锛岃幏鍙栦笅浼犳椂鏈哄搴旂殑铏氭嫙淇¢亾浠e彿锛堝簭鍙凤級锛屽苟缁勭粐涓轰竴涓暟鎹繘琛岃緭鍑猴紝渚嬪锛氫笅浼犳椂鏈轰负瀹炴椂鍜屽欢鏃讹紝閭d箞灏辫〃绀鸿鍖呯殑鎵�灞炶櫄鎷熶俊閬撲负VC1鍜孷C3銆傚鏋滄病鏈夊尮閰嶄笅浼犳椂鏈猴紝灏卞~鍏ョ┖鏁扮粍銆�
+# 杈撳嚭绀轰緥锛�
+[
+  {
+    "id": "PMS001",
+    "name": "鏁扮鏁板瓧閲忓揩閫熸簮鍖�",
+    "vcs": ["VC1",'VC2']
+  },
+]
         """
-        files = [file_map['閬ユ祴澶х翰']]
         print('閬ユ祴婧愬寘鎵�灞炶櫄鎷熶俊閬擄細')
 
         def validation(gen_text):
             pkts = json.loads(gen_text)
             assert len(pkts), 'VC婧愬寘鍒楄〃涓嶈兘涓虹┖'
 
-        text = self.generate_text(_msg, 'out/閬ユ祴VC婧愬寘.json', files=files, validation=validation)
+        doc_text = self.get_text_with_entity(['铏氭嫙淇¢亾瀹氫箟', '閬ユ祴婧愬寘涓嬩紶鏃舵満'])
+        text = await asyncio.to_thread(
+            lambda: self.generate_text_json(_msg, f'{self.json_path}/閬ユ祴VC婧愬寘.json', doc_text=doc_text, validation=validation))
         pkt_vcs = json.loads(text)
         return pkt_vcs
 
    def gen_pkt_format(self):
        """Ask the LLM for the header/field layout of each telemetry data packet.

        :return: parsed JSON list of packet-format dicts (type "logic", data
                 field type "any"), as described by the prompt below.

        NOTE(review): this method still passes ``files=[file_map[...]]`` but the
        module-level ``file_map`` table was removed in this revision — calling
        this will raise ``NameError`` unless ``file_map`` is restored or the
        call is migrated to ``get_text_with_entity`` like the sibling methods.
        Confirm before shipping.
        """
        _msg = """
# 瑙掕壊
浣犳槸涓�鍚嶈祫娣辫蒋浠跺伐绋嬪笀銆�
# 鎸囦护
鎴戦渶瑕佷粠鏂囨。涓彁鍙栨暟鎹寘鐨勬牸寮忥紝浣犺甯姪鎴戝畬鎴愭暟鎹寘鏍煎紡鐨勬彁鍙栥��
# 闇�姹�
璇蜂粩缁嗗垎绯绘枃妗o紝杈撳嚭鍚勪釜鏁版嵁鍖呯殑鏍煎紡銆�
鏁版嵁缁撴瀯鏈�澶栧眰涓烘暟缁勶紝鏁扮粍鍏冪礌涓烘暟鎹寘鏍煎紡锛屽皢涓诲澶寸殑瀛愮骇鎻愬崌鍒颁富瀵煎ご杩欎竴绾у苟涓斿幓闄や富瀵煎ご锛屾暟鎹寘type涓簂ogic锛屽寘鏁版嵁鍩焧ype涓篴ny銆�
鍖呮牸寮忓瓧娈靛寘鎷細鍚嶇О(name)銆佷唬鍙�(id)銆佺被鍨�(type)銆佸瓙绾�(children)銆�
children鍏冪礌鐨勫瓧娈靛寘鎷細name銆乮d銆乸os銆乴ength銆乼ype銆�
children鍏冪礌鍖呮嫭锛氱増鏈彿(Ver)銆佺被鍨�(TM_Type)銆佸壇瀵煎ご鏍囧織(Vice_Head)銆佸簲鐢ㄨ繃绋嬫爣璇嗙(Proc_Sign)銆佸垎缁勬爣蹇�(Group_Sign)銆佸寘搴忓垪璁℃暟(Package_Count)銆佸寘闀�(Pack_Len)銆佹暟鎹煙(EPDU_DATA)銆�
# 绾︽潫
- 鐢熸垚鐨凧SON璇硶鏍煎紡瑕佸悎娉曘��
# 渚嬪瓙
{
        "name": "瀹炴椂閬ユ祴鍙傛暟鍖�",
        "id": "EPDU",
        "type": "logic",
        "children": [
            {
                "name": "鐗堟湰鍙�",
                "id": "Ver",
                "pos": 0,
                "length": 3,
                "type": "para",
                "content": "0",
                "dataTy": "INVAR"
            },
            {
                "name": "鏁版嵁鍩�",
                "id": "EPDU_DATA",
                "pos": 3,
                "length": "鍙橀暱",
                "type": "any"
            }
        ]
}
"""
        print('閬ユ祴鍖呮牸寮忥細')
        # See NOTE(review) in the docstring: file_map is no longer defined at module level.
        text = self.generate_text_json(_msg, f'{self.json_path}/鏁版嵁鍖呮牸寮�.json', files=[file_map['閬ユ祴澶х翰']])
        pkt_formats = json.loads(text)
        return pkt_formats
 
     def compute_length_pos(self, items: list):
-        length = 0
-        pos = 0
-        for child in items:
-            if 'children' in child:
-                self.compute_length_pos(child['children'])
-            child['pos'] = pos
-            if 'length' in child and isinstance(child['length'], int):
-                length = length + child['length']
-                pos = pos + child['length']
+        items.sort(key=lambda x: x['pos'])
+        # for child in items:
+        #     if 'children' in child:
+        #         self.compute_length_pos(child['children'])
+        #     if 'length' in child and isinstance(child['length'], int):
+        #         length = length + child['length']
+        #         pos = pos + child['length']
         # node['length'] = length
 
-    def gen_bus(self, proj_pk, rule_enc, rule_id, ds, name_path, dev_name):
-        _msg = f"""
-璇锋瀽鏂囨。锛屽垪鍑烘�荤嚎閫氫俊鍖呬紶杈撶害瀹氫腑鎻忚堪鐨勬墍鏈夋暟鎹寘鍒楄〃锛�
-鏁版嵁鍖呭瓧娈靛寘鎷細id銆乶ame銆乤pid(16杩涘埗瀛楃涓�)銆乻ervice(鏈嶅姟瀛愭湇鍔�)銆乴ength(bit闀垮害)銆乮nterval(浼犺緭鍛ㄦ湡)銆乻ubAddr(瀛愬湴鍧�/妯″紡)銆乫rameNum(閫氫俊甯у彿)銆�
-transSer(浼犺緭鏈嶅姟)銆乶ote(澶囨敞)銆乺tAddr(鎵�灞濺T鐨勫湴鍧�鍗佽繘鍒�)銆乺t(鎵�灞瀝t鍚嶇О)銆乼hroughBus(鏄惁缁忚繃鎬荤嚎)銆乥urst(鏄惁绐佸彂)銆乼ransDirect(浼犺緭鏂瑰悜)锛�
-鏁版嵁缁撴瀯鏈�澶栧眰鏄暟缁勶紝鏁扮粍鍏冪礌涓烘暟鎹寘锛屼互JSON鏍煎紡杈撳嚭锛屼笉瑕佽緭鍑篔SON浠ュ鐨勪换浣曟枃鏈��
-閫氫俊甯у彿锛氫娇鐢ㄦ枃妗d腑鐨勬枃鏈笉瑕佸仛浠讳綍杞崲銆�
-subAddr锛氬�间负鈥滄繁搴︹�濄�佲�滃钩閾衡�濄�佲�滄暟瀛椻�濇垨null銆�
-鏄惁缁忚繃鎬荤嚎鐨勫垽鏂緷鎹細鈥滃娉ㄢ�濆垪濉啓浜嗗唴瀹圭被浼尖�滀笉缁忚繃鎬荤嚎鈥濈殑鏂囧瓧琛ㄧず涓嶇粡杩囨�荤嚎鍚﹀垯缁忚繃鎬荤嚎銆�
-浼犺緭鏈嶅姟鍒嗕笁绉嶏細SetData(缃暟)銆丟etData(鍙栨暟)銆丏ataBlock(鏁版嵁鍧椾紶杈�)銆�
-浼犺緭鏂瑰悜鍒嗭細鈥濇敹鈥滃拰鈥濆彂鈥滐紝浼犺緭鏈嶅姟濡傛灉鏄�濆彇鏁扳�滄槸鈥濇敹鈥滐紝濡傛灉鏄�濇暟鎹潡浼犺緭鈥滃垯鏍规嵁鍖呮墍鍦ㄧ殑鍒嗙郴缁熶互鍙婅〃鏍肩殑鈥濅紶杈撴柟鍚戔�滃垪杩涜鍒ゆ柇锛屽垽鏂浜嶴MU鏉ヨ鏄敹杩樻槸鍙戙��
-鏄惁绐佸彂鐨勫垽鏂緷鎹細鏍规嵁琛ㄦ牸涓殑鈥濅紶杈撳懆鏈熲�滃垪杩涜鍒ゆ柇锛屽鏋滃~鍐欎簡绫讳技鈥濈獊鍙戔�滅殑鏂囧瓧琛ㄧず鏄獊鍙戝惁鍒欒〃绀轰笉鏄獊鍙戙��
+    async def gen_bus(self, proj_pk, rule_enc, rule_id, ds, name_path, dev_name):
+        _msg = """
+# 瑙掕壊
+浣犳槸涓�鍚嶈祫娣辩殑杞欢宸ョ▼甯堛��
+# 闇�姹�
+璇峰垎鏋愭枃妗d腑鐨勮〃鏍硷紝鎸夎〃鏍奸『搴忚緭鍑鸿〃鏍间腑鐨勬墍鏈夋簮鍖呬俊鎭紱
+鏁版嵁鍖呭瓧娈靛寘鎷細id(鏁版嵁鍖呬唬鍙�)銆乶ame(鏁版嵁鍖呭悕绉�)銆乤pid(16杩涘埗瀛楃涓�)銆乻ervice(鏈嶅姟瀛愭湇鍔�)銆乴ength(闀垮害)銆乮nterval(浼犺緭鍛ㄦ湡)銆乻ubAddr(瀛愬湴鍧�/妯″紡)銆乫rameNum(閫氫俊甯у彿)銆�
+transSer(浼犺緭鏈嶅姟)銆乶ote(澶囨敞)銆乼hroughBus(鏄惁缁忚繃鎬荤嚎)銆乼ransDirect(浼犺緭鏂瑰悜)銆�
+鏂囨。涓鏋滄病鏈夋暟鎹寘琛ㄥ垯杈撳嚭锛歔]銆�
+
+# 鏁版嵁鍖呭瓧娈佃鏄�
+- frameNum(閫氫俊甯у彿)锛氭枃妗d腑閫氫俊甯у彿鍒楃殑鍐呭锛�
+- subAddr(瀛愬湴鍧�/妯″紡)锛氬�煎彧鑳芥槸锛氣�滄繁搴︹�濄�佲�滃钩閾衡�濄�佹暟瀛楁垨null锛屽鏋滄槸鈥�/鈥濆垯鏄痭ull锛�
+- throughBus(鏄惁缁忚繃鎬荤嚎)鐨勫垽鏂緷鎹細鈥滃娉ㄢ�濆垪濉啓浜嗗唴瀹圭被浼尖�滀笉缁忚繃鎬荤嚎鈥濈殑鏂囧瓧琛ㄧず涓嶇粡杩囨�荤嚎鍚﹀垯缁忚繃鎬荤嚎锛�
+- transSer(浼犺緭鏈嶅姟鍒嗕笁绉�)锛氱疆鏁�(SetData)銆佸彇鏁�(GetData)銆佹暟鎹潡浼犺緭(DataBlock)锛屾牴鎹〃鏍间腑鐨勨�滀紶杈撴湇鍔♀�濆垪杩涜鍒ゆ柇锛�
+
+# 绾︽潫
+- 浠呰緭鍑簀son銆�
+- 鎸夌収琛ㄦ牸涓殑椤哄簭杩涜杈撳嚭銆�
+- 涓嶈婕忓寘銆�
+# 渚嬪瓙
+[
+    {
+        "id": "P001",
+        "name": "xxx",
+        "apid": "418",
+        "service": "(1, 2)",
+        "length": 1,
+        "interval": 1000,
+        "subAddr": null,
+        "frameNum": "1|2",
+        "transSer": "DataBlock",
+        "note": "",
+        "throughBus": true,
+        "burst": true,
+        "transDirect": ""
+    }
+]
 """
         print('鎬荤嚎鏁版嵁鍖咃細')
 
         def validation(gen_text):
-            json.loads(gen_text)
+            pkts2 = json.loads(gen_text)
+            assert not next(filter(lambda pkt2: 'transSer' not in pkt2, pkts2), None), '鎬荤嚎鍖呭睘鎬х敓鎴愪笉瀹屾暣锛岀己灏憈ransSer銆�'
 
-        text = self.generate_text(_msg, 'out/鎬荤嚎.json', files=[file_map['鎬荤嚎浼犺緭閫氫俊甯у垎閰�']], validation=validation)
-        pkts = json.loads(text)
+        rt_doc_text = self.get_text_with_entity(['RT鍦板潃鍒嗛厤'])
+        subsys_pkt_texts = self.get_text_list_with_entity(['鍒嗙郴缁熸簮鍖�'])
+        tasks = []
+        rt_adds = []
+        for subsys_pkt_text in subsys_pkt_texts:
+            doc_text = f'{rt_doc_text}\n{subsys_pkt_text}'
+            subsys = subsys_pkt_text[:subsys_pkt_text.index("\n")]
+            # 鍗曠嫭鑾峰彇RT鍦板潃锛屽苟搴旂敤鍒扮珷鑺備笅鎵�鏈夊寘
+            get_rt_msg = f"""杩斿洖{subsys}鐨凴T鍦板潃锛屼粎杈撳嚭鍗佽繘鍒剁殑缁撴灉锛屼笉瑕佽緭鍑哄叾浠栧唴瀹癸紝濡傛灉鏄郴缁熺鐞嗗崟鍏冿紙SMU锛夊垯杩斿洖0銆�"""
+            rt_info = self.generate_text_json(get_rt_msg, "", doc_text=rt_doc_text)
+            if rt_info == '0':
+                continue
+            rt_adds.append({
+                "rt": subsys,
+                "rt_addr": rt_info
+            })
+            # md5 = utils.generate_text_md5(subsys_pkt_text)
+            task = asyncio.to_thread(self.generate_text_json, _msg,
+                                     f"{self.json_path}/鎬荤嚎-{utils.to_file_name(subsys)}.json", doc_text=doc_text,
+                                     validation=validation)
+            tasks.append(task)
+        results = await asyncio.gather(*tasks)
+        pkts = []
+        # 鍒ゆ柇鏄惁瀛樺湪鎬荤嚎鏁版嵁鍖�.json
+        if os.path.isfile(f"{self.json_path}/鎬荤嚎鏁版嵁鍖呭垪琛�.json"):
+            pkts = read_from_file(f"{self.json_path}/鎬荤嚎鏁版嵁鍖呭垪琛�.json")
+            pkts = json.loads(pkts)
+        else:
+            pktid_apid_map = {}
+            for index, result in enumerate(results):
+                pkts_diretions = []
+                # 鍏ㄨ绌烘牸鍘婚櫎
+                result = re.sub(r'銆�', '', result)
+                _pkts = json.loads(result)
+                rt_name = rt_adds[index]["rt"]
+                for _pkt in _pkts:
+                    # 搴旂敤RT鍦板潃
+                    _pkt['rt'] = rt_name
+                    _pkt['rtAddr'] = rt_adds[index]["rt_addr"]
+                    _pkt['burst'] = "绐佸彂" in f"{_pkt['interval']}"
+                    if _pkt['apid'] is None or not re.match(r'[0-9A-Fa-f]+', _pkt['apid']):
+                        _pkt['apid'] = ''
+                    if _pkt['id'] in pktid_apid_map:
+                        if _pkt['apid']:
+                            pktid_apid_map[_pkt['id']] = _pkt['apid']
+                        else:
+                            _pkt['apid'] = pktid_apid_map[_pkt['id']]
+                    else:
+                        pktid_apid_map[_pkt['id']] = _pkt['apid']
+                        # 杞负bit闀垮害
+                        if _pkt['length']:
+                            if isinstance(_pkt['length'], str) and re.match(r'^\d+$', _pkt['length']):
+                                _pkt['length'] = int(_pkt['len']) * 8
+                            elif isinstance(_pkt['length'], int):
+                                _pkt['length'] = _pkt['length'] * 8
+                        pkts.append(_pkt)
+                    # 鑾峰彇寰呭鐞嗙殑浼犺緭鏂瑰悜淇℃伅
+                    pkts_diretions.append({
+                        'id': _pkt['id'],
+                        'rt': _pkt['rt'],
+                        'transDirect': _pkt['transDirect'],
+                    })
+                # 澶勭悊浼犺緭鏂瑰悜
+                _msg = """
+                    澶勭悊浼犲叆鐨刯son鏁扮粍锛屾瘡涓暟缁勫璞′腑鍖呭惈瀛楁锛歳t锛堣嚜韬澶囷級銆乼ransDirect(浼犺緭鏂瑰悜)銆�
+                    闇�瑕佷綘缁欐暟缁勫璞′腑澶氬姞涓�涓瓧娈碉紝杈撳嚭鏁扮粍涓崟涓璞$殑浼犺緭绫诲瀷锛坱ransType锛夛紝浼犺緭绫诲瀷鏈変袱绉嶅�尖�滄敹鈥濆拰鈥滃彂鈥濓紝鍒ゆ柇渚濇嵁鏄牴鎹紶杈撴柟鍚戠殑鍐呭杩涜鍒ゆ柇锛岀敱rt鍙戦�佺粰SMU鐨勪紶杈撶被鍨嬫槸鈥滄敹鈥濓紝鐢盨MU鍙戦�佺粰rt鐨勪紶杈撶被鍨嬫槸鈥滃彂鈥�
+                    rt瀛楁涓虹┖鐨勬暟鎹笉鐢ㄥ鐞嗐��
+                    鍦╰ransDirect瀛楁涓璻t鍙兘涓虹缉鍐欙紝缂╁啓瀵瑰簲鐨剅t鍚嶇О鍙互浠庢枃妗d腑杩涜璇诲彇銆�
+                    杈撳嚭缁撴灉灏嗗鍔犱簡瀛楁鐨刯son鏁扮粍鐩存帴杈撳嚭锛屼笉鐢ㄨ緭鍑哄叾浠栧唴瀹广��
+                    杈撳嚭绀轰緥锛歔{"id": "PMK013", "rt": "涓績鎺у埗鍗曞厓CCU", "transDirect": "CCU鈫扴MU鈫掑湴闈�", "transType": "鏀�"},{"id": "PMK055", "rt": "涓績鎺у埗鍗曞厓CCU", "transDirect": "SMU鈫扖UU", "transType": "鍙�"}]
+                    """ + f"""
+                    JSON锛歿pkts_diretions}
+                """
+                result_json = self.generate_text_json(_msg, f"{self.json_path}/鎬荤嚎-rt-{utils.to_file_name(rt_name)}.json", doc_text=rt_doc_text)
+                # 灏嗗鐞嗙粨鏋滃悓姝ヤ慨鏀瑰埌result
+                for pkt in _pkts:
+                    for data in json.loads(result_json):
+                        if "transType" in data:
+                            if data['id'] == pkt['id']:
+                                pkt['transType'] = data['transType']
+                                break
+        print(f"鎬荤嚎婧愬寘涓暟锛歿len(pkts)}")
         # 绛涢�夌粡鎬荤嚎鐨勬暟鎹寘
         pkts = list(filter(lambda it: it['throughBus'], pkts))
         no_apid_pkts = list(filter(lambda it: not it['apid'], pkts))
         # 绛涢�夋湁apid鐨勬暟鎹寘
         pkts = list(filter(lambda it: it['apid'], pkts))
+        # 绛涢�塺tAddr涓嶄负0鐨勬暟鎹寘锛孲MU鐨�
+        pkts = list(filter(lambda it: it['rtAddr'] != '0', pkts))
 
-        pkts2 = []
+        # 鍌ㄥ瓨鎵�鏈夋�荤嚎鍖�
+        save_to_file(json.dumps(pkts, ensure_ascii=False, indent=2), f"{self.json_path}/鎬荤嚎鏁版嵁鍖呭垪琛�.json")
+
+        tasks = []
+
+        def _run(gen_pkt_details, pkt2):
+            _pkt2 = asyncio.run(gen_pkt_details(pkt2['name'], pkt2['id']))
+            if _pkt2 is not None:
+                pkt2['children'] = []
+                pkt2['children'].extend(_pkt2['datas'])
+
         for pkt in pkts:
-            if self.pkt_in_tm_pkts(pkt["name"]):
-                pkts2.append(pkt)
-        for pkt in pkts2:
-            _pkt = self.gen_pkt_details(pkt['name'], pkt['id'])
-            if _pkt:
-                pkt['children'] = []
-                pkt['children'].extend(_pkt['datas'])
-                pkt['length'] = _pkt['length']
+            pkt_task = asyncio.to_thread(_run, self.gen_pkt_details, pkt)
+            tasks.append(pkt_task)
+
+        await asyncio.gather(*tasks)
+
         rt_pkt_map = {}
         for pkt in pkts:
             # 鏍规嵁鏁版嵁鍧椾紶杈撳拰鍙栨暟鍒嗙粍
             # 閫昏緫灏佽鍖呯殑瑙f瀽瑙勫垯ID锛歊T[rt鍦板潃]SUB[瀛愬湴鍧�]S(S浠h〃鍙栨暟锛屾柟鍚戞槸AA琛ㄧず鍙戦�侊紱R浠h〃缃暟锛屾柟鍚戞槸BB琛ㄧず鎺ュ彈)
             # 鍙栨暟锛氶�昏緫灏佽鍖呮牴鎹瓙鍦板潃鍜屽抚鍙风粍鍚堝垱寤猴紝鏈夊嚑涓粍鍚堝氨鍒涘缓鍑犱釜閫昏緫灏佽鍖�
             # 鏁版嵁鍧楋細鍙湁涓�涓�昏緫灏佽鍖�
-
+            if pkt['subAddr'] is not None and not isinstance(pkt['subAddr'], int) and pkt['subAddr'].find("/") > -1:
+                pkt['subAddr'] = pkt['subAddr'].split("/")[0]
             # 澶勭悊瀛愬湴鍧�
-            if pkt['burst']:
-                # 绐佸彂鍖呭瓙鍦板潃鏄�18~26
-                pkt['subAddr'] = 26
-            elif pkt['subAddr'] == '骞抽摵' or pkt['subAddr'] is None:
+            if pkt['subAddr'] == '骞抽摵' or not pkt['subAddr']:
                 # 骞抽摵锛�11~26锛屾病鏈夊~鍐欑殑榛樿涓哄钩閾�
-                pkt['subAddr'] = 26
+                pkt['subAddr'] = '11~26'
             elif pkt['subAddr'] == '娣卞害':
                 # 娣卞害锛�11
-                pkt['subAddr'] = 11
+                pkt['subAddr'] = '11'
 
+            pkt['burst'] = "绐佸彂" in f"{pkt['interval']}"
             # 澶勭悊甯у彿
             if pkt['burst']:
                 # 绐佸彂锛欰LL
@@ -811,12 +1238,15 @@
                 # 鍙栨暟
                 pkt_id = f"RT{rt_addr}SUB{sub_addr}"
                 vals = f"{rt_addr}/{sub_addr}/0xAA/{frame_no}/"
-                rt_pkt_map_gen(pkt, '鍙栨暟', rt_pkt_map, pkt_id, vals)
+                rt_pkt_map_gen(pkt, '鍙栨暟', rt_pkt_map, pkt_id, vals, pkts)
             elif trans_ser == 'DataBlock':
                 # 鏁版嵁鍧�
                 direct = '0xAA'
-                rt_pkt_map_gen(pkt, '鏁版嵁鍧椾紶杈�', rt_pkt_map, f"RT{rt_addr}SUB{sub_addr}{direct}",
-                               f"{rt_addr}/{sub_addr}/{direct}/ALL/")
+                if pkt['transDirect'] == '鍙�':
+                    direct = '0xBB'
+                pkt_id = f"RT{rt_addr}SUB{sub_addr}{direct}"
+                vals = f"{rt_addr}/{sub_addr}/{direct}/ALL/"
+                rt_pkt_map_gen(pkt, '鏁版嵁鍧椾紶杈�', rt_pkt_map, pkt_id, vals, pkts)
         _pkts = []
         for k in rt_pkt_map:
             _pkts.append(rt_pkt_map[k])
@@ -852,255 +1282,445 @@
             rule_enc.C_KEY = sub_key
             update_rule_enc(rule_enc)
 
-    def gen_tc(self):
+    async def gen_tc(self):
         # 鏁版嵁甯ф牸寮�
-        frame = self.gen_tc_transfer_frame()
-        # 鏁版嵁鍖呮牸寮�
-        pkt_format = self.gen_tc_transfer_pkt()
-        # 鏁版嵁鍖呭垪琛�
-        pkts = self.gen_tc_transfer_pkts()
-        for pkt in pkts:
-            pf = json.loads(json.dumps(pkt_format))
-            pf['name'] = pkt['name']
-            ph = next(filter(lambda x: x['name'] == '涓诲澶�', pf['children']), None)
-            apid = next(filter(lambda x: x['name'] == '搴旂敤杩涚▼鏍囪瘑绗�(APID)', ph['children']), None)
-            apid['value'] = pkt['apid']
-            apid['type'] = 'const'
-            sh = next(filter(lambda x: x['name'] == '鍓澶�', pf['children']), None)
-            ser = next(filter(lambda x: x['name'] == '鏈嶅姟绫诲瀷', sh['children']), None)
-            sub_ser = next(filter(lambda x: x['name'] == '鏈嶅姟瀛愮被鍨�', sh['children']), None)
-            ser['value'] = pkt['server']
-            ser['type'] = 'const'
-            sub_ser['value'] = pkt['subServer']
-            sub_ser['type'] = 'const'
+        frame_task = self.gen_tc_transfer_frame_format()
+        # 閬ユ帶鍖呮牸寮�
+        pkt_format_task = self.gen_tc_pkt_format()
+        # 閬ユ帶鍖呭垪琛�
+        instructions_task = self.gen_tc_transfer_pkts()
+        result = await asyncio.gather(frame_task, pkt_format_task, instructions_task)
+        frame = result[0]
+        pkt_format = result[1]
+        instructions = result[2]
+
+        tasks = []
+        for inst in instructions:
+            # 閬ユ帶鎸囦护鏁版嵁鍖哄唴瀹�
+            tasks.append(self.gen_tc_details(inst))
+
+        await asyncio.gather(*tasks)
+
+        for inst in instructions:
+            inst['type'] = 'insUnit'
+            format_text = json.dumps(pkt_format, ensure_ascii=False)
+            format_text = utils.replace_tpl_paras(format_text, inst)
+            pf = json.loads(format_text)
+            pf['name'] = inst['name']
+            pf['code'] = inst['code']
+            data_area = next(filter(lambda x: x['name'] == '搴旂敤鏁版嵁鍖�', pf['children']))
+            data_area['children'].append(inst)
             frame['subPkts'].append(pf)
         self.order = 0
 
         def build_def(item: dict):
-            if item['type'] == 'enum':
-                return json.dumps({"EnumItems": item['enums'], "CanInput": True})
+            if item['type'] in ['enum', 'sendFlag']:
+                if isinstance(item['enums'], str):
+                    enums = json.loads(item['enums'])
+                else:
+                    enums = item['enums']
+                return json.dumps({"EnumItems": enums, "CanInput": True}, ensure_ascii=False)
             elif item['type'] == 'length':
                 return None
             elif item['type'] == 'checkSum':
-                return json.dumps({"ChecksumType": "CRC-CCITT"})
+                return json.dumps({"ChecksumType": item['value']['type']})
             elif item['type'] == 'subPkt':
                 return json.dumps({"CanInput": False})
-            elif item['type'] == 'combPkt':
+            elif item['type'] in ['combPkt', 'insUnitList', 'input']:
                 return None
+            elif item['type'] == 'insUnit':
+                return '{"MinLength":null,"MaxLength":null,"IsSubPackage":false,"InputParams":[],"OutPutParams":[],"MatchItems":[]}'
+            elif item['type'] == 'pkt':
+                return '''{"MaxLength":1024,"IsSplit8":false,"Split8Start":null,"Split8End":null,"PadCode":null,"Alignment":null,"InputParams":[],"OutPutParams":[],"MatchItems":[]}'''
+            elif item['type'] == 'pktSeqCnt':
+                return json.dumps(
+                    {"FirstPackValue": "PackCount", "MiddlePackValue": "PackIndex", "LastPackValue": "PackIndex",
+                     "IndependPackValue": "InsUnitCount"})
             elif 'value' in item:
                 return item['value']
 
-        def create_tc_format(parent_pk, field):
+        def create_tc_format(parent_pk, field, parent_parent_pk=None):
+            """
+            鍒涘缓閬ユ帶鏍煎紡
+
+            鏁版嵁搴撴暟鎹粨鏋勶細
+            甯у瓧娈� parent_pk=null, pk=pk_001, type=1
+                鍖垮悕瀛楁(瀛愬寘) parent_pk=pk_001, pk=pk_002, type=22
+                    瀛楁1 parent_pk=pk_002, pk=pk_003, type=15
+                    瀛楁2 parent_pk=pk_002, pk=pk_004, type=15
+                鍖呭瓧娈� parent_pk=pk_001, pk=pk_005, type=1
+                    鍖垮悕瀛楁(瀛愬寘) parent_pk=pk_005, pk=pk_006, type=22
+                        瀛楁3 parent_pk=pk_006, pk=pk_007, type=15
+                    鎸囦护鍗曞厓 parent_pk=pk_005, pk=pk_007, type=4
+                        瀛楁4 parent_pk=pk_007, pk=pk_008, type=15
+
+            :param parent_pk: 鐖剁骇pk
+            :param field: 鏍煎紡瀛楁
+            :param parent_parent_pk: 鐖剁骇鐨勭埗绾k
+            :return:
+            """
             field['order'] = self.order
             self.order += 1
             field['def'] = build_def(field)
             if 'length' in field:
                 field['bitWidth'] = field['length']
             field['bitOrder'] = None
-            field['attr'] = 0
-            if field['type'] == 'length':
+            field['attr'] = make_attr(field)
+            if field['type'] == 'length' and 'value' in field and field['value']:
                 val = field['value']
                 field['range'] = val['start'] + "~" + val['end']
                 field['formula'] = val['formula']
+            # 鍗虫椂杈撳叆闀垮害涓簄ull鍒欐槸鍙橀暱瀛楁锛岄渶瑕佹妸绫诲瀷鏀逛负variableLength
+            if field['type'] == 'input' and field['length'] is None:
+                field['type'] = 'variableLength'
+                if isinstance(field['value'], dict):
+                    field['range'] = f'{field["value"]["minLength"]}~{field["value"]["maxLength"]}'
+            # 鏋氫妇鍊奸粯璁ゅ�艰缃�
+            if field['type'] == 'enum' and len(field['enums']) and not next(
+                    filter(lambda x: 'default' in x and x['default'], field['enums']), None):
+                field['enums'][0]['default'] = True
+            # 鏍¢獙鍜�
+            if field['type'] == 'checkSum':
+                field['range'] = f'{field["value"]["start"]}~{field["value"]["end"]}'
             ins_format = create_ins_format(self.proj.C_PROJECT_PK, parent_pk, field)
+            ins_format_pk = ins_format.C_INS_FORMAT_PK
             if 'children' in field:
                 autocode = 1
                 if field['type'] == 'pkt':
-                    ins_format = create_ins_format(self.proj.C_PROJECT_PK, ins_format.C_INS_FORMAT_PK,
-                                                   {'order': self.order, 'type': 'subPkt',
-                                                    'def': json.dumps({"CanInput": False})})
+                    info = {
+                        'order': self.order,
+                        'type': 'subPkt',
+                        'def': json.dumps({"CanInput": False})
+                    }
+                    ins_format = create_ins_format(self.proj.C_PROJECT_PK, ins_format_pk, info)
                     self.order += 1
                 for child in field['children']:
                     child['autocode'] = autocode
                     autocode += 1
-                    create_tc_format(ins_format.C_INS_FORMAT_PK, child)
-            # if 'subPkts' in field:
-            #     for pkt in field['subPkts']:
-            #         ins_format = create_ins_format(self.proj.C_PROJECT_PK, ins_format.C_INS_FORMAT_PK,
-            #                                        {'order': self.order, 'type': 'subPkt',
-            #                                         'def': json.dumps({"CanInput": False})})
-            #         create_tc_format(ins_format.C_INS_FORMAT_PK, pkt)
+                    if field['type'] == 'insUnitList':
+                        _parent_pk = parent_parent_pk
+                    else:
+                        _parent_pk = ins_format.C_INS_FORMAT_PK
+                    create_tc_format(_parent_pk, child, ins_format_pk)
+            if 'subPkts' in field:
+                for _pkt in field['subPkts']:
+                    create_tc_format(ins_format_pk, _pkt, parent_pk)
 
         create_tc_format(None, frame)
 
-    def gen_tc_transfer_frame(self):
+    async def gen_tc_transfer_frame_format(self):
         _msg = '''
-鍒嗘瀽YK浼犻�佸抚鏍煎紡锛屾彁鍙朰K浼犻�佸抚鐨勬暟鎹粨鏋勶紝涓嶅寘鎷暟鎹寘鐨勬暟鎹粨鏋勩��
-## 缁忛獙锛�
-瀛楁绫诲瀷鍖呮嫭锛�
-1.缁勫悎鍖咃細combPkt锛�
-2.鍥哄畾鐮佸瓧锛歝onst锛�
-3.闀垮害锛歭ength锛�
-4.鏋氫妇鍊硷細enum锛�
-5.鏍¢獙鍜岋細checkSum锛�
-6.鏁版嵁鍖猴細subPkt銆�
-
-鏍规嵁瀛楁鎻忚堪鍒嗘瀽瀛楁鐨勭被鍨嬶紝鍒嗘瀽鏂规硶锛�
-1.瀛楁鎻忚堪涓槑纭寚瀹氫簡瀛楁鍊肩殑锛岀被鍨嬩负const锛�
-2.瀛楁涓病鏈夋槑纭寚瀹氬瓧娈靛�硷紝浣嗘槸缃楀垪浜嗗彇鍊艰寖鍥寸殑锛岀被鍨嬩负enum锛�
-3.瀛楁鎻忚堪涓鏋滃瓨鍦ㄥ灞傜骇鎻忚堪鍒欑埗绾у瓧娈电殑绫诲瀷涓篶ombPkt锛�
-4.瀛楁濡傛灉鏄拰鈥滈暱搴︹�濇湁鍏筹紝绫诲瀷涓簂ength锛�
-5.濡傛灉鍜屾暟鎹煙鏈夊叧锛岀被鍨嬩负subPkt锛�
-6.瀛楁濡傛灉鍜屾牎楠屽拰鏈夊叧锛岀被鍨嬩负checkSum銆�
-
-瀛楁鍊兼彁鍙栨柟娉曪細
-1.瀛楁鎻忚堪涓槑纭寚瀹氫簡瀛楁鍊硷紝
-2.闀垮害瀛楁鐨勫�艰鏍规嵁鎻忚堪纭畾璧锋瀛楁鑼冨洿浠ュ強璁$畻鍏紡锛寁alue鏍煎紡渚嬪锛歿"start":"<code>","end":"<code>","formula":"N-1"}锛屾敞鎰忥細start鍜宔nd鐨勫�间负瀛楁code銆�
-
-## 闄愬埗锛�
-- length 鑷姩杞崲涓篵it闀垮害銆�
-- value 鏍规嵁瀛楁鎻忚堪鎻愬彇銆�
-- enums 鏈変簺瀛楁鏄灇涓惧�硷紝鏍规嵁瀛楁鎻忚堪鎻愬彇锛屾灇涓惧厓绱犵殑鏁版嵁缁撴瀯涓簕"n":"","v":"","c":""}銆�
-- 杈撳嚭鍐呭蹇呴』涓轰弗鏍肩殑json锛屼笉鑳借緭鍑洪櫎json浠ュ鐨勪换浣曞唴瀹广��
-
-瀛楁鏁版嵁缁撴瀯锛�
-涓诲澶�
-    鐗堟湰鍙枫�侀�氳繃鏍囧織銆佹帶鍒跺懡浠ゆ爣蹇椼�佺┖闂蹭綅銆丠TQ鏍囪瘑銆佽櫄鎷熶俊閬撴爣璇嗐�佸抚闀裤�佸抚搴忓垪鍙�
-浼犻�佸抚鏁版嵁鍩�
-甯у樊閿欐帶鍒跺煙銆�
-
-# 杈撳嚭鍐呭渚嬪瓙锛�
+# 瑙掕壊
+浣犳槸涓�鍚嶈祫娣辩殑杞欢宸ョ▼甯堛��
+# 鎸囦护
+鍒嗘瀽閬ユ帶浼犻�佸抚鏍煎紡锛屾彁鍙栭仴鎺т紶閫佸抚鏍煎紡鐨勫瓧娈靛畾涔夈��
+# 闇�姹�
+瑕佹彁鍙栧�肩殑甯ф牸寮忓瓧娈碉細
+- 鐗堟湰鍙凤細const锛屼簩杩涘埗锛屼互B缁撳熬锛�
+- 閫氳繃鏍囧織锛歝onst锛屼簩杩涘埗锛屼互B缁撳熬锛�
+- 鎺у埗鍛戒护鏍囧織锛歝onst锛屼簩杩涘埗锛屼互B缁撳熬锛�
+- 绌洪棽浣嶏細const锛屼簩杩涘埗锛屼互B缁撳熬锛�
+- 鑸ぉ鍣ㄦ爣璇嗭細const锛屽崄鍏繘鍒讹紝浠�0x寮�澶达紝濡傛灉鏄簩杩涘埗鎴栧崄杩涘埗闇�瑕佽浆鎹负鍗佸叚杩涘埗锛�
+- 铏氭嫙淇¢亾鏍囪瘑锛歴endFlag锛屽彂閫佹爣璁帮紝榛樿涓衡�滀换鍔℃敞鍏ュ抚鈥濓紝鎵�鏈夌殑鍊奸兘瑕佸垪涓惧嚭鏉ワ紱
+# 鏁版嵁绫诲瀷
+- const锛氬浐瀹氱爜瀛楋紝鏁板�硷紝浜岃繘鍒朵互B缁撳熬锛屽崄杩涘埗锛屽崄鍏繘鍒朵互0x寮�澶达紱
+- sendFlag锛氬彂閫佹爣璁帮紝绫讳技鏋氫妇锛屽畾涔夋牱渚嬶細[{"n":"name","v":"value","c":"code","default":true}]锛宯琛ㄧず鍚嶇О锛寁琛ㄧず鍊硷紝c琛ㄧずcode锛堟病鏈夌┖鐫�锛夛紝default琛ㄧず鏄粯璁ゅ�硷紱
+- checkSum锛氭牎楠屽拰锛屽鏋滄槸鏍¢獙鍜岀被鍨嬭繕闇�瑕佸垎鏋愭牎楠屽拰鐨勭畻娉曪紝骞朵繚瀛樺湪value鐨則ype涓紝鏍¢獙鍜岀畻娉曞寘鎷細瀛楄妭寮傛垨锛圔yteXOR锛夈�佺疮鍔犲拰鍙栧弽锛圫umNot锛夈�佺疮鍔犲拰锛圓ddSum锛夈�佸簲鐢ㄥ惊鐜啑浣欙紙CRC-CCITT锛夈�丆RC8锛圕RC8锛夈�両SO鍜屾牎楠岋紙ISOSum锛夈�佸鏍¢獙锛圤dd锛夈�佸伓鏍¢獙锛圗ven锛夈�佸叾浠栵紙Other锛�
+# 绾︽潫
+- 浠SON鏍煎紡杈撳嚭锛�
+- 浠呰緭鍑篔SON鏂囨湰锛屼笉瑕佽緭鍑轰换浣曞叾浠栨枃鏈��
+# 杈撳嚭渚嬪瓙锛�
 {
-    "name": "YK甯�",
-    "type": "pkt"
-    "children":[
-        {
-            "name": "涓诲澶�",
-            "code": "primaryHeader",
-            "length": 2,
-            "value": "00",
-            "type": "combPkt",
-            "children": [
-                {
-                    "name": "鐗堟湰鍙�",
-                    "code": "verNum"
-                    "length": 1,
-                    "value": "00"
-                }
-            ]
-        }
-    ],
-    "subPkts":[]
+    "鐗堟湰鍙�": "00B",
+    "閫氳繃鏍囧織": "0",
+    ...
 }
 '''
 
         def validation(gen_text):
             json.loads(gen_text)
 
-        text = self.generate_tc_text(_msg, 'out/tc_transfer_frame.json', files=[file_map['鎸囦护鏍煎紡']],
-                                     validation=validation)
-        frame = json.loads(text)
+        doc_text = self.get_text_with_entity(['閬ユ帶甯ф牸寮�'])
+        text = await asyncio.to_thread(
+            lambda: self.generate_tc_text(_msg, f'{self.json_path}/tc_transfer_frame.json', doc_text=doc_text,
+                                          validation=validation))
+        result: dict = json.loads(text)
+        format_text = utils.read_from_file('tpl/tc_transfer_frame.json')
+        format_text = utils.replace_tpl_paras(format_text, result)
+        frame = json.loads(format_text)
         return frame
 
-    def gen_tc_transfer_pkt(self):
+    async def gen_tc_pkt_format(self):
         _msg = '''
-浠呭垎鏋怸K鍖呮牸寮忥紝鎻愬彇YK鍖呮暟鎹粨鏋勩��
-## 缁忛獙锛�
-
-瀛楁绫诲瀷鍖呮嫭锛�
-1.缁勫悎鍖咃細combPkt锛�
-2.鍥哄畾鐮佸瓧锛歝onst锛�
-3.闀垮害锛歭ength锛�
-4.鏋氫妇鍊硷細enum锛�
-5.鏍¢獙鍜岋細checkSum锛�
-6.鏁版嵁鍖猴細subPkt銆�
-
-鏍规嵁瀛楁鎻忚堪鍒嗘瀽瀛楁鐨勭被鍨嬶紝鍒嗘瀽鏂规硶锛�
-1.瀛楁鎻忚堪涓槑纭寚瀹氫簡瀛楁鍊肩殑锛岀被鍨嬩负const锛�
-2.瀛楁涓病鏈夋槑纭寚瀹氬瓧娈靛�硷紝浣嗘槸缃楀垪浜嗗彇鍊艰寖鍥寸殑锛岀被鍨嬩负enum锛�
-3.瀛楁鎻忚堪涓鏋滃瓨鍦ㄥ灞傜骇鎻忚堪鍒欑埗绾у瓧娈电殑绫诲瀷涓篶ombPkt锛�
-4.瀛楁濡傛灉鏄拰鈥滈暱搴︹�濇湁鍏筹紝绫诲瀷涓簂ength锛�
-5.濡傛灉鍜屾暟鎹煙鏈夊叧锛岀被鍨嬩负subPkt锛�
-6.瀛楁濡傛灉鍜屾牎楠屽拰鏈夊叧锛岀被鍨嬩负checkSum銆�
-
-瀛楁鍊兼彁鍙栨柟娉曪細
-1.瀛楁鎻忚堪涓槑纭寚瀹氫簡瀛楁鍊硷紝
-2.闀垮害瀛楁鐨勫�艰鏍规嵁鎻忚堪纭畾璧锋瀛楁鑼冨洿浠ュ強璁$畻鍏紡锛寁alue鏍煎紡渚嬪锛歿"start":"<code>","end":"<code>","formula":"N-1"}锛屾敞鎰忥細start鍜宔nd鐨勫�间负瀛楁code銆�
-
-## 闄愬埗锛�
-- length 鑷姩杞崲涓篵it闀垮害銆�
-- value 鏍规嵁瀛楁鎻忚堪鎻愬彇銆�
-- enums 鏈変簺瀛楁鏄灇涓惧�硷紝鏍规嵁瀛楁鎻忚堪鎻愬彇锛屾灇涓惧厓绱犵殑鏁版嵁缁撴瀯涓簕"n":"","v":"","c":""}銆�
-- 杈撳嚭鍐呭蹇呴』涓轰弗鏍肩殑json锛屼笉鑳借緭鍑洪櫎json浠ュ鐨勪换浣曞唴瀹广��
-
-瀛楁鏁版嵁缁撴瀯锛�
-涓诲澶�
-    鍖呰瘑鍒�
-        鍖呯増鏈彿銆佸寘绫诲瀷銆佹暟鎹尯澶存爣蹇椼�佸簲鐢ㄨ繘绋嬫爣璇嗙(APID)
-    鍖呭簭鍒楁帶鍒�
-        搴忓垪鏍囧織
-        鍖呭簭鍒楄鏁�
-    鍖呴暱
-鍓澶�
-    CCSDS鍓澶存爣蹇�
-    YK鍖呯増鏈彿
-    鍛戒护姝g‘搴旂瓟锛圓ck锛�
-    鏈嶅姟绫诲瀷
-    鏈嶅姟瀛愮被鍨�
-    婧愬湴鍧�
-搴旂敤鏁版嵁鍖�
-甯у樊閿欐帶鍒跺煙銆�
-
-# 杈撳嚭鍐呭渚嬪瓙锛�
+# 瑙掕壊
+浣犳槸涓�鍚嶈祫娣辩殑杞欢宸ョ▼甯堛��
+# 鎸囦护
+鍒嗘瀽閬ユ帶鍖呮牸寮忥紝鎻愬彇閬ユ帶鍖呮牸寮忕殑瀛楁瀹氫箟銆�
+# 闇�姹�
+瑕佹彁鍙栧�肩殑鍖呮牸寮忓瓧娈碉細
+- packetVersionNumber锛氬寘鐗堟湰鍙凤紝const锛屼簩杩涘埗锛�
+- packetType锛氬寘绫诲瀷锛宑onst锛屼簩杩涘埗锛�
+- dataFieldHeaderFlag锛氭暟鎹尯澶存爣蹇楋紝const锛屼簩杩涘埗锛�
+- sequenceFlags锛氬簭鍒楁爣蹇楋紝const锛屼簩杩涘埗锛�
+- ccsdsSecondaryHeaderFlag锛氬壇瀵煎ご鏍囧織锛宑onst锛屼簩杩涘埗锛�
+- tcPktVersionNumber锛氶仴鎺у寘鐗堟湰鍙凤紝const锛屼簩杩涘埗锛�
+- acknowledgmentFlag锛氬懡浠ゆ纭簲绛旓紝const锛屼簩杩涘埗锛�
+- sourceAddr锛氭簮鍦板潃锛宑onst锛屽崄鍏繘鍒躲��
+# 鏁版嵁绫诲瀷
+- 鍥哄畾鐮佸瓧锛歝onst锛屾暟鍊硷紝浜岃繘鍒朵互B缁撳熬锛屽崄杩涘埗锛屽崄鍏繘鍒朵互0x寮�澶达紱
+- 闀垮害锛歭ength锛屽鏋滃瓧娈垫弿杩板唴瀹逛负鏁版嵁鍖哄煙鐨勯暱搴﹀垯琛ㄧず鏄暱搴︼紝闀垮害鐨剉alue涓烘暟鍊笺�乶ull鎴栬寖鍥村畾涔夛紝
+- 鏋氫妇鍊硷細enum锛�
+- 鏍¢獙鍜岋細checkSum锛屽鏋滄槸鏍¢獙鍜岀被鍨嬭繕闇�瑕佸垎鏋愭牎楠屽拰鐨勭畻娉曪紝骞朵繚瀛樺湪value鐨則ype涓紝鏍¢獙鍜岀畻娉曞寘鎷細瀛楄妭寮傛垨锛圔yteXOR锛夈�佺疮鍔犲拰鍙栧弽锛圫umNot锛夈�佺疮鍔犲拰锛圓ddSum锛夈�佸簲鐢ㄥ惊鐜啑浣欙紙CRC-CCITT锛夈�丆RC8锛圕RC8锛夈�両SO鍜屾牎楠岋紙ISOSum锛夈�佸鏍¢獙锛圤dd锛夈�佸伓鏍¢獙锛圗ven锛夈�佸叾浠栵紙Other锛�
+- 鍗虫椂杈撳叆锛歩nput锛屽鏋滄槸鍗虫椂杈撳叆value鐨勫�间负鍙橀暱瀹氫箟銆�
+## 闀垮害绫诲瀷鐨勮寖鍥村畾涔夋弿杩�
+{"start": "璧峰瀛楁code", "end": "缁撴潫瀛楁code", "formula": "璁$畻鍏紡"}
+- start锛氳捣濮嬪瓧娈礳ode锛岄暱搴﹀寘鎷捣濮嬪瓧娈碉紝瀛楁鎻忚堪涓鏄庝簡璧峰瀛楁锛�
+- end锛氱粨鏉熷瓧娈礳ode锛岄暱搴﹀寘鎷粨鏉熷瓧娈碉紝瀛楁鎻忚堪涓鏄庝簡缁撴潫瀛楁锛�
+- formula锛氳绠楀叕寮忥紝濡傛灉娌℃湁璁$畻鐩稿叧鎻忚堪鍒欒〃绀轰笉闇�瑕佽绠楀叕寮忋��
+## 鍗充娇杈撳叆绫诲瀷鐨勫彉闀垮畾涔夋弿杩�
+{"minLength": "鏈�灏忛暱搴�", "maxLength": "鏈�澶ч暱搴�", "variableLength": true}
+- minLength锛氭渶灏忛暱搴︼紝
+- maxLength锛氭渶澶ч暱搴︼紝
+- variableLength锛氭槸鍚︽槸鍙橀暱銆�
+璁$畻鍏紡瀹氫箟锛�
+- BYTES锛氭寜瀛楄妭璁$畻锛�
+- N-x锛氭�诲瓧鑺傛暟鍑弜锛屼緥濡傛�诲瓧鑺傛暟鍑�1鐨勫叕寮忎负N-1銆�
+# 绾︽潫
+- 浠SON鏍煎紡杈撳嚭锛�
+- 浠呰緭鍑篔SON鏂囨湰锛屼笉瑕佽緭鍑轰换浣曞叾浠栨枃鏈��
+# 杈撳嚭渚嬪瓙锛�
 {
-    "name": "YK鍖�",
-    "type": "pkt"
-    "children":[
-        {
-            "name": "涓诲澶�",
-            "code": "primaryHeader",
-            "length": 2,
-            "value": "00",
-            "type": "combPkt",
-            "children": [
-                {
-                    "name": "鐗堟湰鍙�",
-                    "code": "verNum"
-                    "length": 1,
-                    "value": "00"
-                }
-            ]
-        }
-    ],
-    "subPkts":[]
+    "packetVersionNumber": "00B",
+    "packetType": "1B",
+    ...
 }
 '''
 
         def validation(gen_text):
             json.loads(gen_text)
 
-        text = self.generate_tc_text(_msg, 'out/tc_transfer_pkt.json', files=[file_map['鎸囦护鏍煎紡']],
-                                     validation=validation)
-        pkt_format = json.loads(text)
+        doc_text = self.get_text_with_entity(['閬ユ帶鍖呮牸寮�'])
+        text = await asyncio.to_thread(
+            lambda: self.generate_tc_text(_msg, f'{self.json_path}/tc_transfer_pkt.json', doc_text=doc_text,
+                                          validation=validation))
+        result = json.loads(text)
+
+        format_text = utils.read_from_file('tpl/tc_pkt_format.json')
+        format_text = utils.replace_tpl_paras(format_text, result)
+        pkt_format = json.loads(format_text)
         return pkt_format
 
-    def gen_tc_transfer_pkts(self):
+    async def gen_tc_transfer_pkts(self):
         _msg = '''
-鍒嗘瀽鏂囨。鍒楀嚭鎵�鏈夌殑閬ユ帶婧愬寘銆�
-## 鏁版嵁缁撴瀯濡備笅锛�
+# 瑙掕壊
+浣犳槸涓�鍚嶈祫娣辩殑杞欢宸ョ▼甯堛��
+# 鎸囦护
+鍒嗘瀽鏂囨。鍒楀嚭鎵�鏈夌殑閬ユ帶鎸囦护銆�
+# 绾︽潫
+- 搴旂敤杩囩▼鏍囪瘑锛氬簲鐢ㄨ繃绋嬫爣璇嗗氨鏄疉PID锛屼竴鑸細鍦ㄥ悕绉板悗鐨勬嫭鍙蜂腑鍒楀嚭鏉ワ紱
+- code锛氭寚浠や唬鍙凤紝濡傛灉娌℃湁濉啓鎴栬�呮槸鈥�/鈥濆垯浣跨敤绌哄瓧绗︿覆浠f浛锛�
+- name锛氭寚浠ゅ悕绉帮紝鏍规嵁琛ㄦ牸琛屽唴瀹规彁鍙栵紝娉ㄦ剰鏄鍐呭锛屾敞鎰忓悕绉伴渶瑕佹彁鍙栧畬鏁达紝濡傛灉鏈夊鍒楀垯鍚堝苟鐢�-鍒嗗壊锛�
+- shortName锛氭寚浠ゅ悕绉帮紝鏍规嵁琛ㄦ牸鍐呭鎻愬彇锛�
+- apid: 搴旂敤杩囩▼鏍囪瘑绗︼紱
+- serviceType锛氭湇鍔$被鍨嬶紱
+- serviceSubtype锛氭湇鍔″瓙绫诲瀷锛�
+- dataArea锛氬簲鐢ㄦ暟鎹尯锛屾彁鍙栬〃鏍间腑鐨勫簲鐢ㄦ暟鎹尯鍐呭銆�
+# 杈撳嚭渚嬪瓙锛�
 [{
-"name": "xxx",
+"name": "aaa-xxx",
+"shortName": "xxx"
 "code":"pkt",
 "apid":"0xAA",
-"server":"0x1",
-"subServer":"0x2"
+"serviceType":"0x1",
+"serviceSubtype":"0x2",
+"dataArea": ""
 }]
 '''
 
         def validation(gen_text):
             json.loads(gen_text)
 
-        text = self.generate_tc_text(_msg, 'out/tc_transfer_pkts.json', files=[file_map['鎸囦护鏍煎紡']],
-                                     validation=validation)
+        doc_text = self.get_text_with_entity(['APID鍒嗛厤'])
+        text = await asyncio.to_thread(
+            lambda: self.generate_tc_text(_msg, f'{self.json_path}/tc_transfer_pkts.json', doc_text=doc_text,
+                                          validation=validation))
         pkts = json.loads(text)
         return pkts
 
+    async def gen_tc_details(self, pkt):
+        result = []
+        tc_name = pkt['shortName']
+        tc_code = pkt['code']
+        pkt['name'] = f'{tc_code} {tc_name}'
+        _msg = f"""
+# 瑙掕壊
+浣犳槸涓�涓祫娣辫蒋浠跺伐绋嬪笀銆�
 
-if __name__ == '__main__':
+# 鎸囦护
+鍒嗘瀽鏂囨。锛屼粠鏂囨。涓彁鍙栭仴鎺ф寚浠ゅ悕绉颁负鈥渰tc_name}鈥濅唬鍙蜂负鈥渰tc_code}鈥濈殑鎸囦护搴旂敤鏁版嵁鍖哄畾涔夈��
+
+鏈変簺鏂囨。鍐呭闈炲父绠�鍗曚粎浠呭寘鍚壒瀹氬瓧鑺傜殑鍐呭鎻忚堪锛屽鏋滄槸杩欑鏂囨。锛屽垯姣忎釜鐗瑰畾瀛楄妭鐨勫唴瀹规弿杩板畾涔変负涓�涓瓧娈碉紝瀛楁绫诲瀷鏍规嵁瀛楄妭鍐呭纭畾銆�
+""" + """
+
+# 瀛楁绫诲瀷
+- 鍥哄畾鐮佸瓧锛歝onst锛屾暟鍊硷紝浜岃繘鍒朵互B缁撳熬锛屽崄杩涘埗锛屽崄鍏繘鍒朵互0x寮�澶达紱
+- 闀垮害锛歭ength锛屽鏋滃瓧娈垫弿杩板唴瀹逛负鏁版嵁鍖哄煙鐨勯暱搴﹀垯琛ㄧず鏄暱搴︼紝闀垮害鐨剉alue涓烘暟鍊笺�乶ull鎴栬寖鍥村畾涔夛紝
+- 鏋氫妇鍊硷細enum锛�
+- 鏍¢獙鍜岋細checkSum锛屽鏋滄槸鏍¢獙鍜岀被鍨嬭繕闇�瑕佸垎鏋愭牎楠屽拰鐨勭畻娉曟槸浠�涔堜互鍙婃牎楠屾暟鎹煙鑼冨洿锛屽苟淇濆瓨鍦╲alue涓紝渚嬪锛歿 "type":"CRC-CCITT", "start": "START", "end":"END" }锛�
+- 鍗虫椂杈撳叆锛歩nput锛屽鏋滄槸鍗虫椂杈撳叆value鐨勫�间负鍙橀暱瀹氫箟銆�
+
+## 闀垮害绫诲瀷鐨勮寖鍥村畾涔夋弿杩�
+{"start": "璧峰瀛楁code", "end": "缁撴潫瀛楁code", "formula": "璁$畻鍏紡"}
+- start锛氳捣濮嬪瓧娈礳ode锛岄暱搴﹀寘鎷捣濮嬪瓧娈碉紝瀛楁鎻忚堪涓鏄庝簡璧峰瀛楁锛�
+- end锛氱粨鏉熷瓧娈礳ode锛岄暱搴﹀寘鎷粨鏉熷瓧娈碉紝瀛楁鎻忚堪涓鏄庝簡缁撴潫瀛楁锛�
+- formula锛氳绠楀叕寮忥紝濡傛灉娌℃湁闀垮害鐗规畩璁$畻鐩稿叧鎻忚堪鍒欎娇鐢˙YTES銆�
+璁$畻鍏紡瀹氫箟锛�
+- BYTES锛氭寜瀛楄妭璁$畻锛屽瓧鑺傛暟锛�
+- N-x锛氭�诲瓧鑺傛暟鍑弜锛屼緥濡傛�诲瓧鑺傛暟鍑�1鐨勫叕寮忎负N-1銆�
+## 鍗充娇杈撳叆绫诲瀷鐨勫彉闀垮畾涔夋弿杩�
+{"minLength": "鏈�灏忛暱搴�", "maxLength": "鏈�澶ч暱搴�", "variableLength": true}
+- minLength锛氭渶灏忛暱搴︼紝
+- maxLength锛氭渶澶ч暱搴︼紝
+- variableLength锛氭槸鍚︽槸鍙橀暱銆�
+
+# 瀛楁绫诲瀷鍒嗘瀽鏂规硶
+- 鏍规嵁瀛楁鎻忚堪鍒嗘瀽瀛楁鐨勭被鍨嬶紱
+- 瀛楁鎻忚堪涓槑纭寚瀹氫簡瀛楁鍊肩殑锛岀被鍨嬩负const锛�
+- 瀛楁鎻忚堪涓病鏈夋槑纭寚瀹氬瓧娈靛�硷紝浣嗘槸缃楀垪浜嗗彇鍊艰寖鍥寸殑锛岀被鍨嬩负enum锛�
+- 瀛楁鎻忚堪涓鏋滄病鏈夋槑纭寚瀹氬瓧娈靛�间篃娌℃湁缃楀垪鍙栧�艰寖鍥寸殑锛岀被鍨嬩负input锛�
+- 瀛楁濡傛灉鎻忚堪浜嗗綋鍓嶆寚浠や腑鐨勬暟鎹煙闀垮害浠ュ強闀垮害鑼冨洿鍒欐槸闀垮害绫诲瀷length锛屽惁鍒欎笉鏄暱搴︾被鍨嬶紱
+- 濡傛灉鍜屾暟鎹煙鏈夊叧锛岀被鍨嬩负const锛�
+- 鏍¢獙鍜岀被鍨嬶細瀛楁濡傛灉涓庡綋鍓嶆寚浠ゆ暟鎹尯鐨勬牎楠屽拰鏈夊叧鍒欎负鏍¢獙鍜岀被鍨嬪惁鍒欎笉鏄牎楠屽拰绫诲瀷锛屽垎鏋愭牎楠屽拰鐨勭畻娉曪紝骞朵繚瀛樺湪value涓紝鏍¢獙鍜岀畻娉曞寘鎷細瀛楄妭寮傛垨锛圔yteXOR锛夈�佺疮鍔犲拰鍙栧弽锛圫umNot锛夈�佺疮鍔犲拰锛圓ddSum锛夈�佸簲鐢ㄥ惊鐜啑浣欙紙CRC-CCITT锛夈�丆RC8锛圕RC8锛夈�両SO鍜屾牎楠岋紙ISOSum锛夈�佸鏍¢獙锛圤dd锛夈�佸伓鏍¢獙锛圗ven锛夈�佸叾浠栵紙Other锛夈��
+
+# 绾︽潫
+## 瀛楁灞炴��
+- code 濡傛灉娌℃湁鏄庣‘瀹氫箟鍒欎娇鐢ㄥ悕绉扮殑鑻辨枃缈昏瘧锛屽敖閲忕畝鐭紝濡傛灉娌℃湁濉啓鎴栬�呬负鏂滅嚎琛ㄧず娌℃湁鏄庣‘瀹氫箟锛�
+- length 鑷姩杞崲涓篵it闀垮害锛屽繀椤绘槸鏁板�笺�乶ull鎴栬寖鍥村畾涔夛紝涓嶈兘涓�0锛�
+- value 鏍规嵁瀛楁鎻忚堪鎻愬彇瀛楁鍊硷紝瀛楁鍊间竴鑸负鏁板�肩被鍨嬶紝闇�瑕佹牴鎹瓧娈电被鍨嬫潵鍒嗘瀽锛屽鏋滄槸length绫诲瀷value鐨勫�间负鑼冨洿瀹氫箟锛�
+- enums 鏋氫妇绫诲瀷鐨勫瓧娈靛繀椤昏鏈塭nums锛屾牴鎹瓧娈垫弿杩版彁鍙栵紝鏋氫妇鍏冪礌鐨勬暟鎹粨鏋勪负{"n":"","v":"","c":""}锛�
+## 瀛楁绫诲瀷
+- 闀垮害绫诲瀷瀛楁鐨勮寖鍥村畾涔変腑鐨剆tart鍜宔nd蹇呴』鏄敓鎴愮粨鏋滀腑鐨勫瓧娈礳ode锛岄暱搴﹁寖鍥村寘鎷瑂tart鍜宔nd锛屽繀椤讳娇鐢ㄩ暱搴︽弿杩颁腑鐨勫瓧娈碉紱
+- 濡傛灉娌℃湁闀垮害鑼冨洿鎻忚堪鍒欎笉鏄暱搴︾被鍨嬶紱
+- 鏍¢獙鍜岀被鍨嬪瓧娈靛繀椤绘弿杩扮殑鏄綋鍓嶆寚浠ゆ暟鎹煙鏍¢獙鍜岋紝濡傛灉鎻忚堪鐨勪笉鏄綋鍓嶆寚浠ょ殑鏁版嵁鍩熸牎楠屽拰鍒欎笉鏄牎楠屽拰绫诲瀷锛�
+- 杈撳嚭鏁版嵁缁撴瀯涓烘暟缁勶紝鏁扮粍鍏冪礌涓哄瓧娈典俊鎭紱
+- 杈撳嚭鍐呭蹇呴』涓轰弗鏍肩殑json锛屼笉鑳借緭鍑洪櫎json浠ュ鐨勪换浣曞唴瀹广��
+
+# 杈撳嚭渚嬪瓙锛�
+[
+    {
+        "name": "para1",
+        "code": "para1",
+        "length": 8,
+        "type": "const",
+        "value": "0xAA"
+    },
+    {
+        "name": "para2",
+        "code": "para2",
+        "length": 8,
+        "type": "length",
+        "value": {"start": "data", "end": "data", "formula": "BYTES"}
+    },
+    {
+        "name": "para3",
+        "code": "para3",
+        "length": 8,
+        "type": "enum",
+        "value": "",
+        "enums": [{"n":"鍙傛暟1","v":"0x0A","c":"Para1"}]
+    },
+    {
+        "name": "鏁版嵁",
+        "code": "data",
+        "length": null,
+        "type": "input",
+        "value": ""
+    },
+    {
+        "name": "鏍¢獙鍜�",
+        "code": "checksum",
+        "length": 2,
+        "type": "checkSum",
+        "value": { "type": "CRC-CCITT", "start":"para1", "end":"data" }
+    }
+]
+"""
+
+        def validation(gen_text):
+            fields = json.loads(gen_text)
+            for field in fields:
+                if field['type'] == 'length':
+                    if field['value'] is None:
+                        raise Exception('length绫诲瀷鐨剉alue涓嶈兘涓虹┖')
+                    if 'start' not in field['value'] or 'end' not in field['value']:
+                        raise Exception('length绫诲瀷鐨剉alue蹇呴』鍖呭惈start鍜宔nd')
+                    if field['value']['start'] not in [f['code'] for f in fields]:
+                        raise Exception('length绫诲瀷鐨剉alue鐨剆tart瀛楁蹇呴』鍦╢ields涓�')
+                    if field['value']['end'] not in [f['code'] for f in fields]:
+                        raise Exception('length绫诲瀷鐨剉alue鐨別nd瀛楁蹇呴』鍦╢ields涓�')
+                elif field['type'] == 'enum':
+                    if 'enums' not in field:
+                        raise Exception('enum绫诲瀷鐨刦ield蹇呴』鍖呭惈enums')
+                    if len(field['enums']) == 0:
+                        raise Exception('enum绫诲瀷鐨刦ield鐨別nums涓嶈兘涓虹┖')
+                    for enum in field['enums']:
+                        if 'n' not in enum or 'v' not in enum:
+                            raise Exception('enum绫诲瀷鐨刦ield鐨別nums鐨勫厓绱犲繀椤诲寘鍚玭銆乿銆乧')
+                        if enum['n'] == '' or enum['v'] == '':
+                            raise Exception('enum绫诲瀷鐨刦ield鐨別nums鐨勫厓绱犱笉鑳戒负绌�')
+
+        doc_text = self.get_text_with_entity([tc_name])
+        if doc_text == '':
+            doc_text = self.get_text_with_tc_name(tc_name)
+        if doc_text == '':
+            doc_text = pkt['dataArea']
+        text = await asyncio.to_thread(self.generate_tc_text, _msg,
+                                       f"{self.json_path}/閬ユ帶鎸囦护鏁版嵁鍩�-{tc_code}-{utils.to_file_name(tc_name)}.json",
+                                       doc_text=doc_text, validation=validation)
+        result = json.loads(text)
+        pkt['children'] = result
+
+    def get_text_with_tc_name(self, tc_name: str):
+        entities = doc_dbh.get_entities_by_type('鎸囦护鏍煎紡閰嶇疆')
+        entity_names = '\n'.join([f'- {e.name}' for e in entities])
+        msg = f"""
+# 闇�姹�
+璇蜂粠涓嬪垪鎸囦护鍚嶇О涓尮閰嶄竴涓笌鈥渰tc_name}鈥濈浉浼煎害鏈�楂樼殑鎸囦护鍚嶇О銆�
+鎸囦护鍚嶇О鍒楄〃锛�
+{entity_names}
+"""
+        name = self.generate_text(msg,None)
+        entity = next(filter(lambda e: e.name == name, entities,None))
+        if entity:
+            return self.get_text_with_entity([entity.name])
+        else:
+            return ''
+
+
def tc_data_generate(timeout: float = 240):
    """
    Run the external instruction-data generator against the local db.db.

    Invokes db_tc_generator/InstructionGenerator.exe (located next to this
    file) with the database path and prints its output and exit code. Prints
    a warning instead of raising when the tool times out or is missing.

    :param timeout: maximum seconds to wait for the generator (default 240).
    """
    base_dir = os.path.dirname(__file__)
    exe_path = os.path.join(base_dir, "db_tc_generator", "InstructionGenerator.exe")
    db_path = os.path.join(base_dir, "db.db")
    try:
        # BUG FIX: without capture_output, result.stdout was always None and
        # the print below was useless; capture the tool's output explicitly.
        result = subprocess.run([exe_path, db_path], timeout=timeout,
                                capture_output=True, text=True)
        print(result.stdout)
        print(result.returncode)
    except subprocess.TimeoutExpired:
        print("璀﹀憡锛氭寚浠ゆ暟鎹敓鎴愬け璐ワ紒")
    except FileNotFoundError:
        # The generator executable may be absent; warn instead of crashing.
        print("璀﹀憡锛氭寚浠ゆ暟鎹敓鎴愬け璐ワ紒")
+
+
def main():
    """
    Entry point: run the LLM-driven DB structure flow for a project, then
    invoke the external instruction-data generator.

    The project path defaults to the original hard-coded location but can be
    overridden via the KB_PROJECT_PATH environment variable, so the script is
    usable outside the author's machine.
    """
    try:
        project_path = os.environ.get('KB_PROJECT_PATH', r'D:\projects\KnowledgeBase')
        doc_dbh.set_project_path(project_path)
        # Run the large-model processing flow (was wrapped in a pointless
        # f-string around an existing str).
        asyncio.run(DbStructFlow(project_path).run())
        # Generate the instruction data tables with the external tool.
        tc_data_generate()
    except KeyboardInterrupt:
        # Close any in-flight LLM completion on Ctrl-C.
        if g_completion:
            g_completion.close()


if __name__ == '__main__':
    main()

--
Gitblit v1.9.1