Advertisement
Guest User

Untitled

a guest
May 1st, 2025
51
0
Never
Not a member of Pastebin yet? Sign Up, it unlocks many cool features!
text 6.29 KB | None | 0 0
  1. ================================================================ FAILURES =================================================================
  2. ________________________________ HiDreamTransformerTests.test_torch_compile_recompilation_and_graph_break _________________________________
  3.  
  4. self = <tests.models.transformers.test_models_transformer_hidream.HiDreamTransformerTests testMethod=test_torch_compile_recompilation_and_graph_break>
  5.  
  6. @require_torch_gpu
  7. @require_torch_2
  8. @is_torch_compile
  9. @slow
  10. def test_torch_compile_recompilation_and_graph_break(self):
  11. torch._dynamo.reset()
  12. torch._dynamo.config.capture_dynamic_output_shape_ops = True
  13.  
  14. init_dict, inputs_dict = self.prepare_init_args_and_inputs_for_common()
  15.  
  16. model = self.model_class(**init_dict).to(torch_device)
  17. model = torch.compile(model, fullgraph=True)
  18.  
  19. with torch._dynamo.config.patch(error_on_recompile=True), torch.no_grad():
  20. > _ = model(**inputs_dict)
  21.  
  22. tests/models/transformers/test_models_transformer_hidream.py:116:
  23. _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
  24. ../.pyenv/versions/3.10.12/envs/diffusers/lib/python3.10/site-packages/torch/nn/modules/module.py:1751: in _wrapped_call_impl
  25. return self._call_impl(*args, **kwargs)
  26. ../.pyenv/versions/3.10.12/envs/diffusers/lib/python3.10/site-packages/torch/nn/modules/module.py:1762: in _call_impl
  27. return forward_call(*args, **kwargs)
  28. _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
  29.  
  30. args = ()
  31. kwargs = {'encoder_hidden_states_llama3': tensor([[[[ 0.5782, -0.0609, 0.1255, 0.2715],
  32. [ 0.8170, 0.0237, 0.7336,...7798],
  33. [ 0.1814, -1.2094, -1.3258, 0.5981, -0.6065, 0.5869, -0.8609, 0.2553]],
  34. device='cuda:0'), ...}
  35. prior = None, cleanups = [<function nothing at 0x7f4394b8f640>], prior_skip_guard_eval_unsafe = False, saved_dynamic_layer_stack_depth = 0
  36. cleanup = <function nothing at 0x7f4394b8f640>
  37.  
  38. @functools.wraps(fn)
  39. def _fn(*args, **kwargs):
  40. prior = set_eval_frame(None)
  41. try:
  42. if is_fx_tracing():
  43. if config.error_on_nested_fx_trace:
  44. raise RuntimeError(
  45. "Detected that you are using FX to symbolically trace "
  46. "a dynamo-optimized function. This is not supported at the moment."
  47. )
  48. else:
  49. return fn(*args, **kwargs)
  50.  
  51. if is_jit_tracing():
  52. raise RuntimeError(
  53. "Detected that you are using FX to torch.jit.trace "
  54. "a dynamo-optimized function. This is not supported at the moment."
  55. )
  56.  
  57. cleanups = [enter() for enter in self.enter_exit_hooks]
  58. prior_skip_guard_eval_unsafe = set_skip_guard_eval_unsafe(
  59. _is_skip_guard_eval_unsafe_stance()
  60. )
  61.  
  62. # Ensure that if an assertion occurs after graph pushes
  63. # something onto the DynamicLayerStack then we pop it off (the
  64. # constructed graph code isn't guarded with try/finally).
  65. #
  66. # This used to be a context but putting a `with` here is a noticible
  67. # perf regression (#126293)
  68. saved_dynamic_layer_stack_depth = (
  69. torch._C._functorch.get_dynamic_layer_stack_depth()
  70. )
  71. _maybe_set_eval_frame(_callback_from_stance(callback))
  72.  
  73. try:
  74. return fn(*args, **kwargs)
  75. except Unsupported as e:
  76. if config.verbose:
  77. raise
  78. > raise e.with_traceback(None) from None
  79. E torch._dynamo.exc.Unsupported: SKIPPED INLINING <code object moe_infer at 0x7f42c15ea3f0, file "/home/sayak/diffusers/src/diffusers/models/transformers/transformer_hidream_image.py", line 388>:
  80. E
  81. E from user code:
  82. E File "/home/sayak/diffusers/src/diffusers/models/transformers/transformer_hidream_image.py", line 888, in forward
  83. E hidden_states, initial_encoder_hidden_states = block(
  84. E File "/home/sayak/diffusers/src/diffusers/models/transformers/transformer_hidream_image.py", line 598, in forward
  85. E return self.block(
  86. E File "/home/sayak/diffusers/src/diffusers/models/transformers/transformer_hidream_image.py", line 578, in forward
  87. E ff_output_i = gate_mlp_i * self.ff_i(norm_hidden_states)
  88. E File "/home/sayak/diffusers/src/diffusers/models/transformers/transformer_hidream_image.py", line 384, in forward
  89. E y = self.moe_infer(x, flat_topk_idx, topk_weight.view(-1, 1)).view(*orig_shape)
  90. E File "/home/sayak/.pyenv/versions/3.10.12/envs/diffusers/lib/python3.10/site-packages/torch/utils/_contextlib.py", line 116, in decorate_context
  91. E return func(*args, **kwargs)
  92. E
  93. E Set TORCHDYNAMO_VERBOSE=1 for the internal stack trace (please do this especially if you're reporting a bug to PyTorch). For even more developer context, set TORCH_LOGS="+dynamo"
  94.  
  95. ../.pyenv/versions/3.10.12/envs/diffusers/lib/python3.10/site-packages/torch/_dynamo/eval_frame.py:659: Unsupported
  96. ---------------------------------------------------------- Captured stderr call -----------------------------------------------------------
  97. W0501 15:33:46.767000 765883 torch/fx/experimental/symbolic_shapes.py:6679] [0/0] failed during evaluate_expr(u0, hint=None, size_oblivious=False, forcing_spec=False)
  98. E0501 15:33:46.767000 765883 torch/fx/experimental/recording.py:299] [0/0] failed while running evaluate_expr(*(u0, None, False, False), **{})
  99. ========================================================= short test summary info =========================================================
  100. FAILED tests/models/transformers/test_models_transformer_hidream.py::HiDreamTransformerTests::test_torch_compile_recompilation_and_graph_break - torch._dynamo.exc.Unsupported: SKIPPED INLINING <code object moe_infer at 0x7f42c15ea3f0, file "/home/sayak/diffusers/src/diffusers/mo...
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement