Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
- ================================================================ FAILURES =================================================================
- ________________________________ HiDreamTransformerTests.test_torch_compile_recompilation_and_graph_break _________________________________
- self = <tests.models.transformers.test_models_transformer_hidream.HiDreamTransformerTests testMethod=test_torch_compile_recompilation_and_graph_break>
- @require_torch_gpu
- @require_torch_2
- @is_torch_compile
- @slow
- def test_torch_compile_recompilation_and_graph_break(self):
- torch._dynamo.reset()
- torch._dynamo.config.capture_dynamic_output_shape_ops = True
- init_dict, inputs_dict = self.prepare_init_args_and_inputs_for_common()
- model = self.model_class(**init_dict).to(torch_device)
- model = torch.compile(model, fullgraph=True)
- with torch._dynamo.config.patch(error_on_recompile=True), torch.no_grad():
- > _ = model(**inputs_dict)
- tests/models/transformers/test_models_transformer_hidream.py:116:
- _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
- ../.pyenv/versions/3.10.12/envs/diffusers/lib/python3.10/site-packages/torch/nn/modules/module.py:1751: in _wrapped_call_impl
- return self._call_impl(*args, **kwargs)
- ../.pyenv/versions/3.10.12/envs/diffusers/lib/python3.10/site-packages/torch/nn/modules/module.py:1762: in _call_impl
- return forward_call(*args, **kwargs)
- _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
- args = ()
- kwargs = {'encoder_hidden_states_llama3': tensor([[[[ 0.5782, -0.0609, 0.1255, 0.2715],
- [ 0.8170, 0.0237, 0.7336,...7798],
- [ 0.1814, -1.2094, -1.3258, 0.5981, -0.6065, 0.5869, -0.8609, 0.2553]],
- device='cuda:0'), ...}
- prior = None, cleanups = [<function nothing at 0x7f4394b8f640>], prior_skip_guard_eval_unsafe = False, saved_dynamic_layer_stack_depth = 0
- cleanup = <function nothing at 0x7f4394b8f640>
- @functools.wraps(fn)
- def _fn(*args, **kwargs):
- prior = set_eval_frame(None)
- try:
- if is_fx_tracing():
- if config.error_on_nested_fx_trace:
- raise RuntimeError(
- "Detected that you are using FX to symbolically trace "
- "a dynamo-optimized function. This is not supported at the moment."
- )
- else:
- return fn(*args, **kwargs)
- if is_jit_tracing():
- raise RuntimeError(
- "Detected that you are using FX to torch.jit.trace "
- "a dynamo-optimized function. This is not supported at the moment."
- )
- cleanups = [enter() for enter in self.enter_exit_hooks]
- prior_skip_guard_eval_unsafe = set_skip_guard_eval_unsafe(
- _is_skip_guard_eval_unsafe_stance()
- )
- # Ensure that if an assertion occurs after graph pushes
- # something onto the DynamicLayerStack then we pop it off (the
- # constructed graph code isn't guarded with try/finally).
- #
- # This used to be a context but putting a `with` here is a noticible
- # perf regression (#126293)
- saved_dynamic_layer_stack_depth = (
- torch._C._functorch.get_dynamic_layer_stack_depth()
- )
- _maybe_set_eval_frame(_callback_from_stance(callback))
- try:
- return fn(*args, **kwargs)
- except Unsupported as e:
- if config.verbose:
- raise
- > raise e.with_traceback(None) from None
- E torch._dynamo.exc.Unsupported: SKIPPED INLINING <code object moe_infer at 0x7f42c15ea3f0, file "/home/sayak/diffusers/src/diffusers/models/transformers/transformer_hidream_image.py", line 388>:
- E
- E from user code:
- E File "/home/sayak/diffusers/src/diffusers/models/transformers/transformer_hidream_image.py", line 888, in forward
- E hidden_states, initial_encoder_hidden_states = block(
- E File "/home/sayak/diffusers/src/diffusers/models/transformers/transformer_hidream_image.py", line 598, in forward
- E return self.block(
- E File "/home/sayak/diffusers/src/diffusers/models/transformers/transformer_hidream_image.py", line 578, in forward
- E ff_output_i = gate_mlp_i * self.ff_i(norm_hidden_states)
- E File "/home/sayak/diffusers/src/diffusers/models/transformers/transformer_hidream_image.py", line 384, in forward
- E y = self.moe_infer(x, flat_topk_idx, topk_weight.view(-1, 1)).view(*orig_shape)
- E File "/home/sayak/.pyenv/versions/3.10.12/envs/diffusers/lib/python3.10/site-packages/torch/utils/_contextlib.py", line 116, in decorate_context
- E return func(*args, **kwargs)
- E
- E Set TORCHDYNAMO_VERBOSE=1 for the internal stack trace (please do this especially if you're reporting a bug to PyTorch). For even more developer context, set TORCH_LOGS="+dynamo"
- ../.pyenv/versions/3.10.12/envs/diffusers/lib/python3.10/site-packages/torch/_dynamo/eval_frame.py:659: Unsupported
- ---------------------------------------------------------- Captured stderr call -----------------------------------------------------------
- W0501 15:33:46.767000 765883 torch/fx/experimental/symbolic_shapes.py:6679] [0/0] failed during evaluate_expr(u0, hint=None, size_oblivious=False, forcing_spec=False)
- E0501 15:33:46.767000 765883 torch/fx/experimental/recording.py:299] [0/0] failed while running evaluate_expr(*(u0, None, False, False), **{})
- ========================================================= short test summary info =========================================================
- FAILED tests/models/transformers/test_models_transformer_hidream.py::HiDreamTransformerTests::test_torch_compile_recompilation_and_graph_break - torch._dynamo.exc.Unsupported: SKIPPED INLINING <code object moe_infer at 0x7f42c15ea3f0, file "/home/sayak/diffusers/src/diffusers/mo...
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement