Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
- Running 1 job
- WARNING:torchao.kernel.intmm:Warning: Detected no triton, on systems without Triton certain kernels will not work
- WARNING:torchao.kernel.intmm:Warning: Detected no triton, on systems without Triton certain kernels will not work
- D:\AI\ai-toolkit\venv\Lib\site-packages\torch\amp\autocast_mode.py:266: UserWarning: User provided device_type of 'cuda', but CUDA is not available. Disabling
- warnings.warn(
- D:\AI\ai-toolkit\venv\Lib\site-packages\torch\amp\autocast_mode.py:266: UserWarning: User provided device_type of 'cuda', but CUDA is not available. Disabling
- warnings.warn(
- No ROCm runtime is found, using ROCM_HOME='C:\Program Files\AMD\ROCm\6.4'
- Error running job: Building PyTorch extensions using ROCm and Windows is not supported.
- Error running on_error: cannot access local variable 'job' where it is not associated with a value
- ========================================
- Result:
- - 0 completed jobs
- - 1 failure
- ========================================
- Traceback (most recent call last):
-   File "D:\AI\ai-toolkit\run.py", line 142, in <module>
-     main()
-   File "D:\AI\ai-toolkit\run.py", line 130, in main
-     raise e
-   File "D:\AI\ai-toolkit\run.py", line 117, in main
-     job = get_job(config_file, args.name)
-           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-   File "D:\AI\ai-toolkit\toolkit\job.py", line 28, in get_job
-     from jobs import ExtensionJob
-   File "D:\AI\ai-toolkit\jobs\__init__.py", line 1, in <module>
-     from .BaseJob import BaseJob
-   File "D:\AI\ai-toolkit\jobs\BaseJob.py", line 5, in <module>
-     from jobs.process import BaseProcess
-   File "D:\AI\ai-toolkit\jobs\process\__init__.py", line 8, in <module>
-     from .TrainSliderProcess import TrainSliderProcess
-   File "D:\AI\ai-toolkit\jobs\process\TrainSliderProcess.py", line 26, in <module>
-     from .BaseSDTrainProcess import BaseSDTrainProcess
-   File "D:\AI\ai-toolkit\jobs\process\BaseSDTrainProcess.py", line 28, in <module>
-     from toolkit.custom_adapter import CustomAdapter
-   File "D:\AI\ai-toolkit\toolkit\custom_adapter.py", line 13, in <module>
-     from toolkit.models.control_lora_adapter import ControlLoraAdapter
-   File "D:\AI\ai-toolkit\toolkit\models\control_lora_adapter.py", line 5, in <module>
-     from toolkit.lora_special import LoRASpecialNetwork
-   File "D:\AI\ai-toolkit\toolkit\lora_special.py", line 12, in <module>
-     from toolkit.models.lokr import LokrModule
-   File "D:\AI\ai-toolkit\toolkit\models\lokr.py", line 9, in <module>
-     from toolkit.network_mixins import ToolkitModuleMixin
-   File "D:\AI\ai-toolkit\toolkit\network_mixins.py", line 7, in <module>
-     from optimum.quanto import QTensor
-   File "D:\AI\ai-toolkit\venv\Lib\site-packages\optimum\quanto\__init__.py", line 18, in <module>
-     from .library import *
-   File "D:\AI\ai-toolkit\venv\Lib\site-packages\optimum\quanto\library\__init__.py", line 15, in <module>
-     from .extensions import *
-   File "D:\AI\ai-toolkit\venv\Lib\site-packages\optimum\quanto\library\extensions\__init__.py", line 17, in <module>
-     from .cpp import *
-   File "D:\AI\ai-toolkit\venv\Lib\site-packages\optimum\quanto\library\extensions\cpp\__init__.py", line 19, in <module>
-     from ..extension import Extension
-   File "D:\AI\ai-toolkit\venv\Lib\site-packages\optimum\quanto\library\extensions\extension.py", line 7, in <module>
-     from torch.utils.cpp_extension import load
-   File "D:\AI\ai-toolkit\venv\Lib\site-packages\torch\utils\cpp_extension.py", line 240, in <module>
-     HIP_HOME = _join_rocm_home('hip') if ROCM_HOME else None
-                ^^^^^^^^^^^^^^^^^^^^^^
-   File "D:\AI\ai-toolkit\venv\Lib\site-packages\torch\utils\cpp_extension.py", line 177, in _join_rocm_home
-     raise OSError('Building PyTorch extensions using '
- OSError: Building PyTorch extensions using ROCm and Windows is not supported.
Advertisement
Add Comment
Please, Sign In to add comment