Browse Source

Merge pull request #3860 from myhloli/dev

feat: enhance API call parameters with conditional extra_body for thinking mode
Xiaomeng Zhao 3 tuần trước
mục cha
commit
7f96fa94b7
3 tập tin đã thay đổi với 17 bổ sung và 10 xóa
  1. 2 1
      mineru.template.json
  2. 2 1
      mineru/cli/client.py
  3. 13 8
      mineru/utils/llm_aided.py

+ 2 - 1
mineru.template.json

@@ -18,6 +18,7 @@
             "api_key": "your_api_key",
             "base_url": "https://dashscope.aliyuncs.com/compatible-mode/v1",
             "model": "qwen3-next-80b-a3b-instruct",
+            "enable_thinking": false,
             "enable": false
         }
     },
@@ -25,5 +26,5 @@
         "pipeline": "",
         "vlm": ""
     },
-    "config_version": "1.3.0"
+    "config_version": "1.3.1"
 }

+ 2 - 1
mineru/cli/client.py

@@ -126,7 +126,8 @@ if is_mac_os_version_supported():
     '--device',
     'device_mode',
     type=str,
-    help='Device mode for model inference, e.g., "cpu", "cuda", "cuda:0", "npu", "npu:0", "mps". Adapted only for the case where the backend is set to "pipeline". ',
+    help="""Device mode for model inference, e.g., "cpu", "cuda", "cuda:0", "npu", "npu:0", "mps".
+         Adapted only for the case where the backend is set to "pipeline" and "vlm-transformers". """,
     default=None,
 )
 @click.option(

+ 13 - 8
mineru/utils/llm_aided.py

@@ -84,16 +84,21 @@ Corrected title list:
     max_retries = 3
     dict_completion = None
 
+    # Build API call parameters
+    api_params = {
+        "model": title_aided_config["model"],
+        "messages": [{'role': 'user', 'content': title_optimize_prompt}],
+        "temperature": 0.7,
+        "stream": True,
+    }
+
+    # Only add extra_body when explicitly specified in config
+    if "enable_thinking" in title_aided_config:
+        api_params["extra_body"] = {"enable_thinking": title_aided_config["enable_thinking"]}
+
     while retry_count < max_retries:
         try:
-            completion = client.chat.completions.create(
-                model=title_aided_config["model"],
-                messages=[
-                    {'role': 'user', 'content': title_optimize_prompt}],
-                extra_body={"enable_thinking": False},
-                temperature=0.7,
-                stream=True,
-            )
+            completion = client.chat.completions.create(**api_params)
             content_pieces = []
             for chunk in completion:
                 if chunk.choices and chunk.choices[0].delta.content is not None: