Mirror of https://github.com/hiyouga/LlamaFactory.git (synced 2026-03-27 04:37:43 +08:00)
[deps] goodbye python 3.9 (#9677)
Co-authored-by: copilot-swe-agent[bot] <198982749+Copilot@users.noreply.github.com>
Co-authored-by: hiyouga <16256802+hiyouga@users.noreply.github.com>
Co-authored-by: hiyouga <hiyouga@buaa.edu.cn>
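The diff below is one mechanical change applied across the web UI modules: with Python 3.9 support dropped, annotations switch from typing.Optional and typing.Union to the PEP 604 union operator (X | Y), which the interpreter accepts in annotations from Python 3.10 onward. A minimal before/after sketch of the pattern, for illustration only; the new-style signatures mirror the diff, while the _legacy names are hypothetical and not from the repository:

# Before (Python 3.9 compatible): Optional/Union imported from typing.
from typing import Any, Optional, Union

def load_args_legacy(config_path: str) -> Optional[dict[str, Any]]:
    ...

def load_config_legacy() -> dict[str, Union[str, dict[str, Any]]]:
    ...

# After (Python 3.10+, PEP 604): the | operator builds the union directly,
# so only Any still needs to be imported from typing.
from typing import Any

def load_args(config_path: str) -> dict[str, Any] | None:
    ...

def load_config() -> dict[str, str | dict[str, Any]]:
    ...

The | form in annotations only evaluates successfully at runtime on Python 3.10 or newer (unless evaluation is deferred with from __future__ import annotations), which is why adopting it goes hand in hand with removing 3.9 support.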
@@ -16,7 +16,7 @@ import json
 import os
 from collections.abc import Generator
 from contextlib import contextmanager
-from typing import TYPE_CHECKING, Any, Optional
+from typing import TYPE_CHECKING, Any

 from transformers.utils import is_torch_npu_available

@@ -81,7 +81,7 @@ class WebChatModel(ChatModel):
     def __init__(self, manager: "Manager", demo_mode: bool = False, lazy_init: bool = True) -> None:
         self.manager = manager
         self.demo_mode = demo_mode
-        self.engine: Optional[BaseEngine] = None
+        self.engine: BaseEngine | None = None

         if not lazy_init:  # read arguments from command line
             super().__init__()
@@ -197,9 +197,9 @@ class WebChatModel(ChatModel):
         lang: str,
         system: str,
         tools: str,
-        image: Optional[Any],
-        video: Optional[Any],
-        audio: Optional[Any],
+        image: Any | None,
+        video: Any | None,
+        audio: Any | None,
         max_new_tokens: int,
         top_p: float,
         temperature: float,
@@ -17,7 +17,7 @@ import os
 import signal
 from collections import defaultdict
 from datetime import datetime
-from typing import Any, Optional, Union
+from typing import Any

 from psutil import Process
 from yaml import safe_dump, safe_load
@@ -71,7 +71,7 @@ def _get_config_path() -> os.PathLike:
     return os.path.join(DEFAULT_CACHE_DIR, USER_CONFIG)


-def load_config() -> dict[str, Union[str, dict[str, Any]]]:
+def load_config() -> dict[str, str | dict[str, Any]]:
     r"""Load user config if exists."""
     try:
         with open(_get_config_path(), encoding="utf-8") as f:
@@ -81,7 +81,7 @@ def load_config() -> dict[str, Union[str, dict[str, Any]]]:


 def save_config(
-    lang: str, hub_name: Optional[str] = None, model_name: Optional[str] = None, model_path: Optional[str] = None
+    lang: str, hub_name: str | None = None, model_name: str | None = None, model_path: str | None = None
 ) -> None:
     r"""Save user config."""
     os.makedirs(DEFAULT_CACHE_DIR, exist_ok=True)
@@ -151,7 +151,7 @@ def load_dataset_info(dataset_dir: str) -> dict[str, dict[str, Any]]:
         return {}


-def load_args(config_path: str) -> Optional[dict[str, Any]]:
+def load_args(config_path: str) -> dict[str, Any] | None:
     r"""Load the training configuration from config path."""
     try:
         with open(config_path, encoding="utf-8") as f:
@@ -14,7 +14,7 @@

 import json
 from collections.abc import Generator
-from typing import TYPE_CHECKING, Union
+from typing import TYPE_CHECKING

 from ...extras.constants import PEFT_METHODS
 from ...extras.misc import torch_gc
@@ -37,7 +37,7 @@ if TYPE_CHECKING:
 GPTQ_BITS = ["8", "4", "3", "2"]


-def can_quantize(checkpoint_path: Union[str, list[str]]) -> "gr.Dropdown":
+def can_quantize(checkpoint_path: str | list[str]) -> "gr.Dropdown":
     if isinstance(checkpoint_path, list) and len(checkpoint_path) != 0:
         return gr.Dropdown(value="none", interactive=False)
     else:
@@ -49,7 +49,7 @@ def save_model(
     model_name: str,
     model_path: str,
     finetuning_type: str,
-    checkpoint_path: Union[str, list[str]],
+    checkpoint_path: str | list[str],
     template: str,
     export_size: int,
     export_quantization_bit: str,
@@ -14,7 +14,7 @@

 import json
 import os
-from typing import Any, Optional
+from typing import Any

 from transformers.trainer_utils import get_last_checkpoint

@@ -206,7 +206,7 @@ def list_datasets(dataset_dir: str = None, training_stage: str = list(TRAINING_S
     return gr.Dropdown(choices=datasets)


-def list_output_dirs(model_name: Optional[str], finetuning_type: str, current_time: str) -> "gr.Dropdown":
+def list_output_dirs(model_name: str | None, finetuning_type: str, current_time: str) -> "gr.Dropdown":
     r"""List all the directories that can resume from.

     Inputs: top.model_name, top.finetuning_type, train.current_time
@@ -17,7 +17,7 @@ import os
 from collections.abc import Generator
 from copy import deepcopy
 from subprocess import PIPE, Popen, TimeoutExpired
-from typing import TYPE_CHECKING, Any, Optional
+from typing import TYPE_CHECKING, Any

 from transformers.utils import is_torch_npu_available

@@ -59,7 +59,7 @@ class Runner:
         self.manager = manager
         self.demo_mode = demo_mode
         """ Resume """
-        self.trainer: Optional[Popen] = None
+        self.trainer: Popen | None = None
         self.do_train = True
         self.running_data: dict[Component, Any] = None
         """ State """